From be159f50c6046c766978773cfc03c4ff23ca1d04 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 10:50:21 +0100 Subject: [PATCH 001/235] adds a new 'temp' way to manage logging attributes --- syslogging/adapter.py | 39 +++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/syslogging/adapter.py b/syslogging/adapter.py index 9d2df58cd7..122c8a846b 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -25,7 +25,7 @@ def process(self, msg, kwargs): new_kwargs = dict() method = kwargs.pop("method", "overwrite") - if method not in ["clear", "preserve", "overwrite"]: + if method not in ["clear", "preserve", "overwrite", "temp"]: raise ValueError(f"Invalid value for 'method': {method}") for k, v in kwargs.items(): @@ -34,22 +34,29 @@ def process(self, msg, kwargs): else: new_kwargs[k] = v - merged = self._merge_attributes(method, attrs) - new_kwargs["extra"] = merged - self.extra = merged - - if self.extra: - return "%s %s" % (self.extra, msg), new_kwargs + """ + Four possible ways to deal with attributes + 1. temp: passed values overwrite existing for one message, then discarded + 2. clear: clear existing, use passed values + 3. preserve: merge with existing values preserved + 4. overwrite: merge with existing values overwritten + """ + if method == "temp": + if self.extra: + return "%s %s" % ({**self.extra, **attrs}, msg), new_kwargs + else: + return "%s %s" % (attrs, msg), new_kwargs else: - return "%s" % msg, new_kwargs + merged = self._merge_attributes(method, attrs) + new_kwargs["extra"] = merged + self.extra = merged + + if self.extra: + return "%s %s" % (self.extra, msg), new_kwargs + else: + return "%s" % msg, new_kwargs def _merge_attributes(self, method, attributes): - """ - Three possible ways to deal with attributes - 1. clear: clear existing, use passed values - 2. preserve: merge with existing values preserved - 3. overwrite: merge with existing values overwritten - """ if not self.extra or method == "clear": merged = attributes elif method == "preserve": @@ -63,7 +70,7 @@ def setup(self, **kwargs): # Create a copy of me with different attributes warnings.warn( "The 'setup' function is deprecated; instead, " - "update attributes with method=clear/preserve/overwrite", + "update attributes with method=clear/preserve/overwrite/temp", DeprecationWarning, 2, ) @@ -75,7 +82,7 @@ def label(self, **kwargs): # permanently add new attributes to me warnings.warn( "The 'label' function is deprecated; instead, " - "update attributes with method=clear/preserve/overwrite", + "update attributes with method=clear/preserve/overwrite/temp", DeprecationWarning, 2, ) From ddadcbf269c779088450106d53cd42e09aa3cfaa Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 10:50:54 +0100 Subject: [PATCH 002/235] poc for new 'temp' way to manage logging attributes --- examples/logging/poc.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/examples/logging/poc.py b/examples/logging/poc.py index 1b5b1af0c6..85fa67f6ec 100644 --- a/examples/logging/poc.py +++ b/examples/logging/poc.py @@ -67,17 +67,22 @@ stage="one", ) +# merging attributes: method 'temp' +temp = get_logger("temp", {"type": "first"}) +temp.info("type should be 'first'") +temp.info( + "type should be 'second' temporarily", + method="temp", + type="second", +) +temp.info("type should be back to 'first'") + # levels level = get_logger("Level") level.setLevel(logging.WARNING) level.info("does not print") level.warning("does print") -# level aliases -alias = get_logger("Alias") -alias.msg("msg() is a temporary alias for DEBUG") -alias.terse("terse() is a temporary alias for INFO") -alias.warn("warn() is a temporary alias for %s", "WARNING") # alias 'setup' setup = get_logger("Setup", {"stage": "one", "type": "first"}) From fc1df5ac0674056d7f9a0c5f24214978ada18e70 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 12:13:16 +0100 Subject: [PATCH 003/235] removing log.setup_empty_except_keep_type() --- examples/logging/poc.py | 6 ------ sysbrokers/IB/ib_connection.py | 18 ++++++++++-------- sysdata/data_blob.py | 2 +- syslogging/adapter.py | 13 ------------- syslogging/tests/logging_tests.py | 17 ----------------- 5 files changed, 11 insertions(+), 45 deletions(-) diff --git a/examples/logging/poc.py b/examples/logging/poc.py index 85fa67f6ec..f4766769fe 100644 --- a/examples/logging/poc.py +++ b/examples/logging/poc.py @@ -96,11 +96,5 @@ label.label(stage="two") label.info("stage two") -# alias 'setup_empty_except_keep_type' -keep_type = get_logger("Keep_Type", {"type": "first", "stage": "one"}) -keep_type.info("type first, stage one") -keep_type = keep_type.setup_empty_except_keep_type() -keep_type.info("type first, no stage") - # critical mail level.critical("sends mail") diff --git a/sysbrokers/IB/ib_connection.py b/sysbrokers/IB/ib_connection.py index 23a0bdfc48..679434c0a6 100644 --- a/sysbrokers/IB/ib_connection.py +++ b/sysbrokers/IB/ib_connection.py @@ -28,13 +28,13 @@ def __init__( ib_ipaddress: str = arg_not_supplied, ib_port: int = arg_not_supplied, account: str = arg_not_supplied, - log: pst_logger = get_logger("connectionIB"), + log_name: str = "connectionIB", ): """ :param client_id: client id :param ipaddress: IP address of machine running IB Gateway or TWS. If not passed then will get from private config file, or defaults :param port: Port listened to by IB Gateway or TWS - :param log: logging object + :param log_name: calling log name :param mongo_db: mongoDB connection """ @@ -50,7 +50,14 @@ def __init__( # mongoIBclientIDtracker(database_name="another") # If you copy for another broker include these lines - self._init_log(log, client_id) + self._log = get_logger( + "connectionIB", + **{ + TYPE_LOG_LABEL: log_name, + BROKER_LOG_LABEL: "IB", + CLIENTID_LOG_LABEL: client_id, + } + ) # You can pass a client id yourself, or let IB find one @@ -58,11 +65,6 @@ def __init__( ipaddress=ipaddress, port=port, client_id=client_id, account=account ) - def _init_log(self, log, client_id: int): - new_log = log.setup_empty_except_keep_type() - new_log.label(**{BROKER_LOG_LABEL: "IB", CLIENTID_LOG_LABEL: client_id}) - self._log = new_log - def _init_connection( self, ipaddress: str, port: int, client_id: int, account=arg_not_supplied ): diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index 9d67488897..7c11be2943 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -279,7 +279,7 @@ def _get_new_ib_connection(self) -> connectionIB: client_id = self._get_next_client_id_for_ib() while True: try: - ib_conn = connectionIB(client_id, log=self.log) + ib_conn = connectionIB(client_id, log_name=self.log_name) for id in failed_ids: self.db_ib_broker_client_id.release_clientid(id) return ib_conn diff --git a/syslogging/adapter.py b/syslogging/adapter.py index 122c8a846b..b02c689686 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -93,19 +93,6 @@ def label(self, **kwargs): self._check_attributes(attributes) self.extra = attributes - def setup_empty_except_keep_type(self): - warnings.warn( - "The 'setup_empty_except_keep_type' function is deprecated; instead, " - "update attributes with method=clear/preserve/overwrite", - DeprecationWarning, - 2, - ) - if self.extra and TYPE_LOG_LABEL in self.extra: - attributes = {TYPE_LOG_LABEL: self.extra[TYPE_LOG_LABEL]} - else: - attributes = {} - return DynamicAttributeLogger(logging.getLogger(self.name), attributes) - def _check_attributes(self, attributes: dict): if attributes: bad_attributes = get_list_of_disallowed_attributes(attributes) diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index 581f6a49aa..90ae8c7960 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -93,20 +93,3 @@ def test_label_bad(self): logger = get_logger("my_type", {"stage": "bar"}) with pytest.raises(Exception): logger.label(stage="left", foo="bar") - - def test_setup_empty_with(self): - setup_with = get_logger("Setup_With", {"type": "foo", "stage": "one"}) - assert setup_with.extra["type"] == "foo" - assert setup_with.extra["stage"] == "one" - setup_with = setup_with.setup_empty_except_keep_type() - assert setup_with.extra["type"] == "foo" - assert "stage" not in setup_with.extra - - def test_setup_empty_without(self): - setup_without = get_logger("Setup_Without", {"stage": "one"}) - setup_without = setup_without.setup_empty_except_keep_type() - assert "type" not in setup_without.extra - - setup_without = get_logger("Setup_Without") - setup_without = setup_without.setup_empty_except_keep_type() - assert "type" not in setup_without.extra From b584da8778d915636011a88972a5826327a909f6 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 14:08:03 +0100 Subject: [PATCH 004/235] removing log.label() --- examples/logging/poc.py | 14 ++++++++------ syscontrol/strategy_tools.py | 4 +++- sysdata/futures/instruments.py | 4 ++-- sysdata/futures/roll_calendars.py | 4 ++-- sysdata/futures/rolls_parameters.py | 4 ++-- syslogging/tests/logging_tests.py | 16 ---------------- .../interactive_manual_check_fx_prices.py | 2 +- ...interactive_manual_check_historical_prices.py | 4 +++- sysproduction/update_fx_prices.py | 2 +- sysproduction/update_historical_prices.py | 2 +- sysproduction/update_multiple_adjusted_prices.py | 2 +- 11 files changed, 24 insertions(+), 34 deletions(-) diff --git a/examples/logging/poc.py b/examples/logging/poc.py index f4766769fe..7ded0c1e2d 100644 --- a/examples/logging/poc.py +++ b/examples/logging/poc.py @@ -90,11 +90,13 @@ setup = setup.setup(stage="two") setup.info("stage two, no type") -# alias 'label' -label = get_logger("Label", {"stage": "one"}) -label.info("stage one") -label.label(stage="two") -label.info("stage two") +# replacing log.label() - we want to update the log attributes permanently - same as +# overwrite +label = get_logger("label", {"stage": "whatever"}) +label.info("Should have 'stage' of 'whatever'") +label.info("Updating log attributes", instrument_code="GOLD") +label.info("Should have 'stage' of 'whatever', and 'instrument_code' 'GOLD'") + # critical mail -level.critical("sends mail") +# level.critical("sends mail") diff --git a/syscontrol/strategy_tools.py b/syscontrol/strategy_tools.py index a2a9ea7396..788b7e4333 100644 --- a/syscontrol/strategy_tools.py +++ b/syscontrol/strategy_tools.py @@ -61,7 +61,9 @@ def get_strategy_class_instance(data: dataBlob, strategy_name: str, process_name ) strategy_data = dataBlob(log_name=process_name) - strategy_data.log.label(**{STRATEGY_NAME_LOG_LABEL: strategy_name}) + strategy_data.log.debug( + "Updating log attributes", **{STRATEGY_NAME_LOG_LABEL: strategy_name} + ) strategy_class_instance = strategy_class_object( strategy_data, strategy_name, **other_args diff --git a/sysdata/futures/instruments.py b/sysdata/futures/instruments.py index b13167fc52..1795e5e216 100644 --- a/sysdata/futures/instruments.py +++ b/sysdata/futures/instruments.py @@ -94,7 +94,7 @@ def get_instrument_data( return futuresInstrumentWithMetaData.create_empty() def delete_instrument_data(self, instrument_code: str, are_you_sure: bool = False): - self.log.label(instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if are_you_sure: if self.is_code_in_data(instrument_code): @@ -124,7 +124,7 @@ def add_instrument_data( ): instrument_code = instrument_object.instrument_code - self.log.label(instrument_code=instrument_code) + self.log.log(0, "Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: diff --git a/sysdata/futures/roll_calendars.py b/sysdata/futures/roll_calendars.py index 34955d1918..88084f7260 100644 --- a/sysdata/futures/roll_calendars.py +++ b/sysdata/futures/roll_calendars.py @@ -33,7 +33,7 @@ def get_roll_calendar(self, instrument_code: str) -> rollCalendar: raise Exception("Calendar for %s not found!" % instrument_code) def delete_roll_calendar(self, instrument_code: str, are_you_sure=False): - self.log.label(instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if are_you_sure: if self.is_code_in_data(instrument_code): @@ -60,7 +60,7 @@ def add_roll_calendar( ignore_duplication: bool = False, ): - self.log.label(instrument_code=instrument_code) + self.log.log(0, "Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: diff --git a/sysdata/futures/rolls_parameters.py b/sysdata/futures/rolls_parameters.py index b7712ac976..7f8ff9e39c 100644 --- a/sysdata/futures/rolls_parameters.py +++ b/sysdata/futures/rolls_parameters.py @@ -40,7 +40,7 @@ def get_roll_parameters(self, instrument_code: str) -> rollParameters: ) def delete_roll_parameters(self, instrument_code: str, are_you_sure: bool = False): - self.log.label(instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if are_you_sure: if self.is_code_in_data(instrument_code): @@ -67,7 +67,7 @@ def add_roll_parameters( ignore_duplication: bool = False, ): - self.log.label(instrument_code=instrument_code) + self.log.log(0, "Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index 90ae8c7960..7e06459435 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -77,19 +77,3 @@ def test_setup_bad(self): logger = get_logger("my_type", {"stage": "bar"}) with pytest.raises(Exception): logger.setup(foo="bar") - - def test_label(self): - logger = get_logger("my_type", {"stage": "bar"}) - logger.label(stage="left", instrument_code="ABC") - assert logger.name == "my_type" - assert logger.extra["stage"] == "left" - assert logger.extra["instrument_code"] == "ABC" - - no_attrs = get_logger("no_attrs") - no_attrs.label(instrument_code="XYZ") - assert no_attrs.extra["instrument_code"] == "XYZ" - - def test_label_bad(self): - logger = get_logger("my_type", {"stage": "bar"}) - with pytest.raises(Exception): - logger.label(stage="left", foo="bar") diff --git a/sysproduction/interactive_manual_check_fx_prices.py b/sysproduction/interactive_manual_check_fx_prices.py index 1ba6f273f4..4f5f074e9f 100644 --- a/sysproduction/interactive_manual_check_fx_prices.py +++ b/sysproduction/interactive_manual_check_fx_prices.py @@ -32,7 +32,7 @@ def interactive_manual_check_fx_prices(): if fx_code is EXIT_STR: do_another = False ## belt. Also braces. else: - data.log.label(currency_code=fx_code) + data.log.debug("Updating log attributes", currency_code=fx_code) check_fx_ok_for_broker(data, fx_code) update_manual_check_fx_prices_for_code(fx_code, data) diff --git a/sysproduction/interactive_manual_check_historical_prices.py b/sysproduction/interactive_manual_check_historical_prices.py index f9152bf65e..b172ef324a 100644 --- a/sysproduction/interactive_manual_check_historical_prices.py +++ b/sysproduction/interactive_manual_check_historical_prices.py @@ -39,7 +39,9 @@ def interactive_manual_check_historical_prices(): do_another = False else: check_instrument_ok_for_broker(data, instrument_code) - data.log.label(instrument_code=instrument_code) + data.log.debug( + "Updating log attributes", instrument_code=instrument_code + ) update_historical_prices_for_instrument( instrument_code=instrument_code, cleaning_config=cleaning_config, diff --git a/sysproduction/update_fx_prices.py b/sysproduction/update_fx_prices.py index 7cc8f0f661..2d0b39f880 100644 --- a/sysproduction/update_fx_prices.py +++ b/sysproduction/update_fx_prices.py @@ -44,7 +44,7 @@ def update_fx_prices_with_data(data: dataBlob): data.log.debug("FX Codes: %s" % str(list_of_codes_all)) for fx_code in list_of_codes_all: - data.log.label(**{CURRENCY_CODE_LOG_LABEL: fx_code}) + data.log.debug("Updating log attributes", **{CURRENCY_CODE_LOG_LABEL: fx_code}) update_fx_prices_for_code(fx_code, data) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index ea6d104495..5f03a17179 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -282,7 +282,7 @@ def update_historical_prices_for_list_of_instrument_codes( cleaning_config = get_config_for_price_filtering(data) for instrument_code in list_of_instrument_codes: - data.log.label(instrument_code=instrument_code) + data.log.debug("Updating log attributes", instrument_code=instrument_code) update_historical_prices_for_instrument( instrument_code, data, diff --git a/sysproduction/update_multiple_adjusted_prices.py b/sysproduction/update_multiple_adjusted_prices.py index 61b97b436e..e71ab3e424 100644 --- a/sysproduction/update_multiple_adjusted_prices.py +++ b/sysproduction/update_multiple_adjusted_prices.py @@ -110,7 +110,7 @@ def update_multiple_adjusted_prices_for_instrument( :return: None """ - data.log.label(instrument_code=instrument_code) + data.log.debug("Updating log attributes", instrument_code=instrument_code) updated_multiple_prices = calc_updated_multiple_prices(data, instrument_code) updated_adjusted_prices = calc_update_adjusted_prices( data, instrument_code, updated_multiple_prices From 0698796960f89e86a6ee0375ef2c5e2108ce9d08 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 14:09:09 +0100 Subject: [PATCH 005/235] removing log.label() --- syslogging/adapter.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/syslogging/adapter.py b/syslogging/adapter.py index b02c689686..ece8771f0e 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -78,21 +78,6 @@ def setup(self, **kwargs): self._check_attributes(attributes) return DynamicAttributeLogger(logging.getLogger(self.name), attributes) - def label(self, **kwargs): - # permanently add new attributes to me - warnings.warn( - "The 'label' function is deprecated; instead, " - "update attributes with method=clear/preserve/overwrite/temp", - DeprecationWarning, - 2, - ) - if not self.extra: - attributes = {**kwargs} - else: - attributes = {**self.extra, **kwargs} - self._check_attributes(attributes) - self.extra = attributes - def _check_attributes(self, attributes: dict): if attributes: bad_attributes = get_list_of_disallowed_attributes(attributes) From 69751aeb21c668bf29672b7eb6646b05fb43775d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 14:37:09 +0100 Subject: [PATCH 006/235] black magic --- sysbrokers/IB/ib_connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysbrokers/IB/ib_connection.py b/sysbrokers/IB/ib_connection.py index 679434c0a6..2367fa40f8 100644 --- a/sysbrokers/IB/ib_connection.py +++ b/sysbrokers/IB/ib_connection.py @@ -56,7 +56,7 @@ def __init__( TYPE_LOG_LABEL: log_name, BROKER_LOG_LABEL: "IB", CLIENTID_LOG_LABEL: client_id, - } + }, ) # You can pass a client id yourself, or let IB find one From 8870d1c90d10ecdcb4de0cf17bbf4d2cda856843 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Sep 2023 16:00:11 +0100 Subject: [PATCH 007/235] fix IB connection logging setup --- sysbrokers/IB/ib_connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysbrokers/IB/ib_connection.py b/sysbrokers/IB/ib_connection.py index 2367fa40f8..b72b226aa2 100644 --- a/sysbrokers/IB/ib_connection.py +++ b/sysbrokers/IB/ib_connection.py @@ -52,7 +52,7 @@ def __init__( # If you copy for another broker include these lines self._log = get_logger( "connectionIB", - **{ + { TYPE_LOG_LABEL: log_name, BROKER_LOG_LABEL: "IB", CLIENTID_LOG_LABEL: client_id, From fdacee04dbddf739f7fd99a09d768abfb80c81f1 Mon Sep 17 00:00:00 2001 From: rob Date: Mon, 18 Sep 2023 11:37:49 +0100 Subject: [PATCH 008/235] bug fix in cost calculation --- systems/accounts/pandl_calculators/pandl_cash_costs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/systems/accounts/pandl_calculators/pandl_cash_costs.py b/systems/accounts/pandl_calculators/pandl_cash_costs.py index afbb3a0715..15f830becc 100644 --- a/systems/accounts/pandl_calculators/pandl_cash_costs.py +++ b/systems/accounts/pandl_calculators/pandl_cash_costs.py @@ -194,7 +194,9 @@ def normalise_costs_in_instrument_currency(self, costs_as_pd_series) -> pd.Serie return costs_as_pd_series cost_deflator = self.cost_deflator() - reindexed_deflator = cost_deflator.reindex(costs_as_pd_series.index).ffill() + reindexed_deflator = cost_deflator.reindex( + costs_as_pd_series.index, method="ffill" + ) normalised_costs = reindexed_deflator * costs_as_pd_series From 29f6a45267ec545e74f6cf60e4a9f7e1cef27170 Mon Sep 17 00:00:00 2001 From: rob Date: Sat, 23 Sep 2023 15:47:14 +0100 Subject: [PATCH 009/235] spawn child orders clearer logic --- .../stack_handler/spawn_children_from_instrument_orders.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py index 1d1a280af6..54db138d70 100644 --- a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py +++ b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py @@ -261,7 +261,7 @@ def get_required_contract_trade_for_instrument( instrument_code ) or diag_positions.is_roll_state_adjusted(instrument_code): ## do nothing - pass + return [] elif diag_positions.is_double_sided_trade_roll_state(instrument_code): order_reduces_positions = is_order_reducing_order( @@ -280,7 +280,7 @@ def get_required_contract_trade_for_instrument( ) else: ## do nothing - pass + return [] else: log.critical( From bc2cc48a154378fe79dac879dc9e3de73d2bc239 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 25 Sep 2023 10:49:03 +0100 Subject: [PATCH 010/235] fix zero level log statements --- sysdata/futures/instruments.py | 2 +- sysdata/futures/roll_calendars.py | 2 +- sysdata/futures/rolls_parameters.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sysdata/futures/instruments.py b/sysdata/futures/instruments.py index 1795e5e216..4e334fbea1 100644 --- a/sysdata/futures/instruments.py +++ b/sysdata/futures/instruments.py @@ -124,7 +124,7 @@ def add_instrument_data( ): instrument_code = instrument_object.instrument_code - self.log.log(0, "Updating log attributes", instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: diff --git a/sysdata/futures/roll_calendars.py b/sysdata/futures/roll_calendars.py index 88084f7260..064d9adec4 100644 --- a/sysdata/futures/roll_calendars.py +++ b/sysdata/futures/roll_calendars.py @@ -60,7 +60,7 @@ def add_roll_calendar( ignore_duplication: bool = False, ): - self.log.log(0, "Updating log attributes", instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: diff --git a/sysdata/futures/rolls_parameters.py b/sysdata/futures/rolls_parameters.py index 7f8ff9e39c..f612982358 100644 --- a/sysdata/futures/rolls_parameters.py +++ b/sysdata/futures/rolls_parameters.py @@ -67,7 +67,7 @@ def add_roll_parameters( ignore_duplication: bool = False, ): - self.log.log(0, "Updating log attributes", instrument_code=instrument_code) + self.log.debug("Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): if ignore_duplication: From 9d98bfcbf24b0d4f2001022d93e23cd3159de042 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 25 Sep 2023 11:42:44 +0100 Subject: [PATCH 011/235] updating logging docs --- docs/production.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/production.md b/docs/production.md index 3203400068..edc6e3eb62 100644 --- a/docs/production.md +++ b/docs/production.md @@ -799,7 +799,7 @@ For other methods, like `label()`, `setup()`, each should be taken on a case by > Multiple calls to getLogger() with the same name will always return a reference to the same Logger object. -So our outer object handles the context attributes, and the inner `logging.Logger` object does the rest. We cannot copy logger instances as we did with the legacy system. Instead, we can manage the attributes with three ways to merge: *overwrite* (the default), *preserve*, and *clear*. +So our outer object handles the context attributes, and the inner `logging.Logger` object does the rest. We cannot copy logger instances as we did with the legacy system. Instead, we can manage the attributes with four ways to merge: *overwrite* (the default), *preserve*, *clear*, and *temp*. ```python # merging attributes: method 'overwrite' (default if no method supplied) @@ -824,6 +824,16 @@ So our outer object handles the context attributes, and the inner `logging.Logge clear.info("clear, type 'first', stage 'one'") clear.info("clear, type 'second', no stage", method="clear", type="second") clear.info("clear, no attributes", method="clear") + + # merging attributes: method 'temp' + temp = get_logger("temp", {"type": "first"}) + temp.info("type should be 'first'") + temp.info( + "type should be 'second' temporarily", + method="temp", + type="second", + ) + temp.info("type should be back to 'first'") ``` #### Cleaning old logs From d6b2360cbad4409259530d37294cb634944486fb Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 25 Sep 2023 11:43:04 +0100 Subject: [PATCH 012/235] adding temp logging attributes test --- syslogging/tests/logging_tests.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index 7e06459435..4e843581df 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -63,6 +63,21 @@ def test_attributes_overwrite(self, caplog): ) ] + def test_attributes_temp(self, caplog): + temp = get_logger("temp", {"stage": "first"}) + temp.info("setting temp 'type' attribute", method="temp", type="one") + assert caplog.record_tuples[0] == ( + "temp", + logging.INFO, + "{'stage': 'first', 'type': 'one'} setting temp 'type' attribute", + ) + temp.info("no type attribute") + assert caplog.record_tuples[1] == ( + "temp", + logging.INFO, + "{'stage': 'first'} no type attribute", + ) + def test_setup(self): logger = get_logger("my_type", {"stage": "bar"}) logger = logger.setup(stage="left") From fe27798c0d1425fec641b40db406a5f1ca320482 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 25 Sep 2023 12:16:02 +0100 Subject: [PATCH 013/235] log.warn() -> log.warning() --- sysbrokers/IB/ib_Fx_prices_data.py | 4 ++-- sysbrokers/IB/ib_instruments_data.py | 2 +- syslogging/tests/logging_tests.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sysbrokers/IB/ib_Fx_prices_data.py b/sysbrokers/IB/ib_Fx_prices_data.py index ca1f2000a6..7da66b699c 100644 --- a/sysbrokers/IB/ib_Fx_prices_data.py +++ b/sysbrokers/IB/ib_Fx_prices_data.py @@ -41,7 +41,7 @@ def get_list_of_fxcodes(self) -> list: try: config_data = self._get_ib_fx_config() except missingFile: - self.log.warn("Can't get list of fxcodes for IB as config file missing") + self.log.warning("Can't get list of fxcodes for IB as config file missing") return [] list_of_codes = get_list_of_codes(config_data=config_data) @@ -104,7 +104,7 @@ def _get_config_info_for_code(self, currency_code: str) -> ibFXConfig: config_data = self._get_ib_fx_config() except missingFile as e: new_log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - new_log.warn( + new_log.warning( "Can't get IB FX config for %s as config file missing" % currency_code ) raise missingInstrument from e diff --git a/sysbrokers/IB/ib_instruments_data.py b/sysbrokers/IB/ib_instruments_data.py index ab32c2ccfe..8077bfa01e 100644 --- a/sysbrokers/IB/ib_instruments_data.py +++ b/sysbrokers/IB/ib_instruments_data.py @@ -68,7 +68,7 @@ def get_list_of_instruments(self) -> list: try: config = self.ib_config except missingFile: - self.log.warn( + self.log.warning( "Can't get list of instruments because IB config file missing" ) return [] diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index 4e843581df..eebf7116b7 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -13,7 +13,7 @@ def test_name(self, caplog): def test_attributes_good(self, caplog): logger = get_logger("my_type", {"stage": "bar"}) - logger.warn("foo %s", "bar") + logger.warning("foo %s", "bar") assert caplog.record_tuples == [ ("my_type", logging.WARNING, "{'stage': 'bar'} foo bar") ] @@ -24,7 +24,7 @@ def test_attributes_bad(self): def test_no_name_no_attributes(self, caplog): logger = get_logger("") - logger.warn("foo") + logger.warning("foo") assert caplog.record_tuples == [("root", logging.WARNING, "foo")] def test_attributes_clear(self, caplog): From 0315eac1d895f33c993a75e808c65bef15105883 Mon Sep 17 00:00:00 2001 From: rob Date: Wed, 27 Sep 2023 09:22:20 +0100 Subject: [PATCH 014/235] small bug in static reporting --- sysproduction/reporting/adhoc/static_system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/reporting/adhoc/static_system.py b/sysproduction/reporting/adhoc/static_system.py index 41b16dbbbd..6d389920a4 100644 --- a/sysproduction/reporting/adhoc/static_system.py +++ b/sysproduction/reporting/adhoc/static_system.py @@ -40,7 +40,7 @@ def static_system_adhoc_report( capital, est_number_of_instruments, ) in list_of_capital_and_estimate_instrument_count_tuples: - system = futures_system() + system = system_function() instrument_list = static_system_results_for_capital( system, corr_matrix=corr_matrix, From 9fd9abcbe16631dd6865d3f6f1fd20ae7de29db1 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 27 Sep 2023 14:05:13 +0100 Subject: [PATCH 015/235] clear log attributes between iterations --- sysproduction/interactive_manual_check_fx_prices.py | 6 +++++- sysproduction/interactive_manual_check_historical_prices.py | 4 +++- sysproduction/update_fx_prices.py | 6 +++++- sysproduction/update_historical_prices.py | 6 +++++- sysproduction/update_multiple_adjusted_prices.py | 6 +++++- 5 files changed, 23 insertions(+), 5 deletions(-) diff --git a/sysproduction/interactive_manual_check_fx_prices.py b/sysproduction/interactive_manual_check_fx_prices.py index 4f5f074e9f..a320f2bf32 100644 --- a/sysproduction/interactive_manual_check_fx_prices.py +++ b/sysproduction/interactive_manual_check_fx_prices.py @@ -32,7 +32,11 @@ def interactive_manual_check_fx_prices(): if fx_code is EXIT_STR: do_another = False ## belt. Also braces. else: - data.log.debug("Updating log attributes", currency_code=fx_code) + data.log.debug( + "Updating log attributes", + method="clear", + currency_code=fx_code, + ) check_fx_ok_for_broker(data, fx_code) update_manual_check_fx_prices_for_code(fx_code, data) diff --git a/sysproduction/interactive_manual_check_historical_prices.py b/sysproduction/interactive_manual_check_historical_prices.py index b172ef324a..115a116dd6 100644 --- a/sysproduction/interactive_manual_check_historical_prices.py +++ b/sysproduction/interactive_manual_check_historical_prices.py @@ -40,7 +40,9 @@ def interactive_manual_check_historical_prices(): else: check_instrument_ok_for_broker(data, instrument_code) data.log.debug( - "Updating log attributes", instrument_code=instrument_code + "Updating log attributes", + method="clear", + instrument_code=instrument_code, ) update_historical_prices_for_instrument( instrument_code=instrument_code, diff --git a/sysproduction/update_fx_prices.py b/sysproduction/update_fx_prices.py index 2d0b39f880..a885656b39 100644 --- a/sysproduction/update_fx_prices.py +++ b/sysproduction/update_fx_prices.py @@ -44,7 +44,11 @@ def update_fx_prices_with_data(data: dataBlob): data.log.debug("FX Codes: %s" % str(list_of_codes_all)) for fx_code in list_of_codes_all: - data.log.debug("Updating log attributes", **{CURRENCY_CODE_LOG_LABEL: fx_code}) + data.log.debug( + "Updating log attributes", + method="clear", + currency_code=fx_code, + ) update_fx_prices_for_code(fx_code, data) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index 5f03a17179..ef74b5a62d 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -282,7 +282,11 @@ def update_historical_prices_for_list_of_instrument_codes( cleaning_config = get_config_for_price_filtering(data) for instrument_code in list_of_instrument_codes: - data.log.debug("Updating log attributes", instrument_code=instrument_code) + data.log.debug( + "Updating log attributes", + method="clear", + instrument_code=instrument_code, + ) update_historical_prices_for_instrument( instrument_code, data, diff --git a/sysproduction/update_multiple_adjusted_prices.py b/sysproduction/update_multiple_adjusted_prices.py index e71ab3e424..c27192070f 100644 --- a/sysproduction/update_multiple_adjusted_prices.py +++ b/sysproduction/update_multiple_adjusted_prices.py @@ -110,7 +110,11 @@ def update_multiple_adjusted_prices_for_instrument( :return: None """ - data.log.debug("Updating log attributes", instrument_code=instrument_code) + data.log.debug( + "Updating log attributes", + method="clear", + instrument_code=instrument_code, + ) updated_multiple_prices = calc_updated_multiple_prices(data, instrument_code) updated_adjusted_prices = calc_update_adjusted_prices( data, instrument_code, updated_multiple_prices From e5641b86815c992041e54603dfeef43aa5c03161 Mon Sep 17 00:00:00 2001 From: Yuntai Kyong Date: Thu, 5 Oct 2023 22:56:58 +0900 Subject: [PATCH 016/235] Fix to use total_seconds --- sysobjects/production/process_control.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysobjects/production/process_control.py b/sysobjects/production/process_control.py index 10de26a8f4..01b1ca94db 100644 --- a/sysobjects/production/process_control.py +++ b/sysobjects/production/process_control.py @@ -301,7 +301,7 @@ def has_process_finished_in_last_day(self) -> bool: time_now = datetime.datetime.now() time_delta = time_now - end_time - if time_delta.seconds <= SECONDS_PER_DAY: + if time_delta.total_seconds() <= SECONDS_PER_DAY: return True else: return False From 9b605c46f63fcab024aa61b7f232a4cf43e18a59 Mon Sep 17 00:00:00 2001 From: rob Date: Fri, 6 Oct 2023 10:15:54 +0100 Subject: [PATCH 017/235] minor tweaks to backtest; alternative to #1264 --- sysobjects/instruments.py | 2 +- sysproduction/data/broker.py | 6 ++--- systems/accounts/account_inputs.py | 27 ++++++++++++------- .../account_curve_order_simulator.py | 4 +-- systems/portfolio.py | 3 ++- 5 files changed, 26 insertions(+), 16 deletions(-) diff --git a/sysobjects/instruments.py b/sysobjects/instruments.py index 5b8aec424d..b77e06e684 100644 --- a/sysobjects/instruments.py +++ b/sysobjects/instruments.py @@ -389,7 +389,7 @@ def calculate_cost_per_block_commission(self, blocks_traded): def calculate_percentage_commission(self, blocks_traded, price_per_block): trade_value = self.calculate_trade_value(blocks_traded, price_per_block) - return self._percentage_cost * trade_value + return self.percentage_cost * trade_value def calculate_trade_value(self, blocks_traded, value_per_block): return abs(blocks_traded) * value_per_block diff --git a/sysproduction/data/broker.py b/sysproduction/data/broker.py index 29da638178..a1c553d0f8 100644 --- a/sysproduction/data/broker.py +++ b/sysproduction/data/broker.py @@ -227,9 +227,9 @@ def get_trading_hours_for_contract( return result def get_all_current_contract_positions(self) -> listOfContractPositions: - - list_of_positions = ( - self.broker_contract_position_data.get_all_current_positions_as_list_with_contract_objects() + broker_account_id = self.get_broker_account() + list_of_positions = self.broker_contract_position_data.get_all_current_positions_as_list_with_contract_objects( + broker_account_id ) return list_of_positions diff --git a/systems/accounts/account_inputs.py b/systems/accounts/account_inputs.py index 24ef92d7fa..278dfa71a4 100644 --- a/systems/accounts/account_inputs.py +++ b/systems/accounts/account_inputs.py @@ -37,16 +37,25 @@ def instrument_prices_for_position_or_forecast_infer_frequency( self, instrument_code: str, position_or_forecast: pd.Series = arg_not_supplied ) -> pd.Series: - frequency = infer_frequency(position_or_forecast) - if frequency is BUSINESS_DAY_FREQ: - instrument_prices = self.get_daily_prices(instrument_code) - elif frequency is HOURLY_FREQ: - instrument_prices = self.get_hourly_prices(instrument_code) - else: - raise Exception( - "Frequency %s does not have prices for %s should be hourly or daily" - % (str(frequency), instrument_code) + try: + frequency = infer_frequency(position_or_forecast) + if frequency is BUSINESS_DAY_FREQ: + instrument_prices = self.get_daily_prices(instrument_code) + elif frequency is HOURLY_FREQ: + instrument_prices = self.get_hourly_prices(instrument_code) + else: + raise Exception( + "Frequency %s does not have prices for %s should be hourly or daily" + % (str(frequency), instrument_code) + ) + except: + self.log.warning( + "Going to index hourly prices for %s to position_or_forecast might result in phantoms" + % instrument_code ) + hourly_prices = self.get_hourly_prices(instrument_code) + + instrument_prices = hourly_prices.reindex(position_or_forecast.index) return instrument_prices diff --git a/systems/accounts/order_simulator/account_curve_order_simulator.py b/systems/accounts/order_simulator/account_curve_order_simulator.py index e6f908b333..93f6d0c33d 100644 --- a/systems/accounts/order_simulator/account_curve_order_simulator.py +++ b/systems/accounts/order_simulator/account_curve_order_simulator.py @@ -17,7 +17,7 @@ def pandl_for_subsystem( self, instrument_code, delayfill=True, roundpositions=True ) -> accountCurve: - self.log.msg( + self.log.debug( "Calculating pandl for subsystem for instrument %s" % instrument_code, instrument_code=instrument_code, ) @@ -78,7 +78,7 @@ def _pandl_calculator_for_subsystem_with_cash_costs( def pandl_for_instrument( self, instrument_code: str, delayfill: bool = True, roundpositions: bool = True ) -> accountCurve: - self.log.msg( + self.log.debug( "Calculating pandl for instrument for %s" % instrument_code, instrument_code=instrument_code, ) diff --git a/systems/portfolio.py b/systems/portfolio.py index 536cc16f63..219b2f410c 100644 --- a/systems/portfolio.py +++ b/systems/portfolio.py @@ -887,8 +887,9 @@ def pandl_across_subsystems( if instrument_list is arg_not_supplied: instrument_list = self.get_instrument_list() + ## roundpositions=True required to make IDM work with order simulator return accounts.pandl_across_subsystems_given_instrument_list( - instrument_list, roundpositions=False + instrument_list, roundpositions=True ) @input From 5cbb2edd006c9f2d5382af4048bdcdb2562fe7ae Mon Sep 17 00:00:00 2001 From: Yuntai Kyong Date: Sat, 7 Oct 2023 19:49:52 +0900 Subject: [PATCH 018/235] Fix strategy_report when `risk_overlay` is not configured --- sysproduction/strategy_code/report_system_classic.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sysproduction/strategy_code/report_system_classic.py b/sysproduction/strategy_code/report_system_classic.py index ed6678ce81..c5d0497c8a 100644 --- a/sysproduction/strategy_code/report_system_classic.py +++ b/sysproduction/strategy_code/report_system_classic.py @@ -529,7 +529,11 @@ def risk_scaling_string(backtest) -> str: backtest_system_portfolio_stage.get_leverage_for_original_position().iloc[-1] ) percentage_vol_target = backtest_system_portfolio_stage.get_percentage_vol_target() - risk_scalar_final = backtest_system_portfolio_stage.get_risk_scalar().iloc[-1] + risk_scalar = backtest_system_portfolio_stage.get_risk_scalar() + if type(risk_scalar) is pd.Series: + risk_scalar_final = risk_scalar.iloc[-1] + else: + risk_scalar_final = risk_scalar risk_overlay_config = ( backtest_system_portfolio_stage.config.get_element_or_arg_not_supplied( "risk_overlay" From b54fd3c1dd906be0bf01f874fe91d2772b7336ef Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 10 Oct 2023 14:41:59 +0100 Subject: [PATCH 019/235] refactor some simple log.setup() usages - pass temporary log attributes instead of copying the logger instance. Adds TODO comments for the remaining ones --- sysbrokers/IB/client/ib_contracts_client.py | 74 +++++++++++-------- sysbrokers/IB/client/ib_fx_client.py | 1 + sysbrokers/IB/client/ib_price_client.py | 32 ++++---- sysbrokers/IB/config/ib_instrument_config.py | 12 +-- sysbrokers/IB/ib_Fx_prices_data.py | 29 +++++--- .../IB/ib_futures_contract_price_data.py | 29 +++++--- sysbrokers/IB/ib_futures_contracts_data.py | 36 +++++---- sysbrokers/IB/ib_orders.py | 16 ++-- sysbrokers/broker_futures_contract_data.py | 8 +- syscontrol/run_process.py | 1 + syscontrol/timer_functions.py | 1 + .../arctic_futures_per_contract_prices.py | 15 ++-- sysdata/arctic/arctic_spotfx_prices.py | 15 ++-- sysdata/config/configdata.py | 1 + sysdata/csv/csv_futures_contract_prices.py | 7 +- sysdata/data_blob.py | 1 + sysdata/futures/contracts.py | 32 +++++--- .../futures/futures_per_contract_prices.py | 48 +++++++----- sysdata/futures/multiple_prices.py | 27 ++++--- sysdata/fx/spotfx.py | 56 +++++++++----- sysdata/sim/sim_data.py | 1 + sysexecution/algos/algo.py | 34 +++++---- sysexecution/algos/algo_market.py | 13 +++- sysexecution/algos/algo_original_best.py | 6 +- sysexecution/algos/allocate_algo_to_order.py | 32 +++++--- sysexecution/algos/common_functions.py | 22 ++++-- .../order_stacks/contract_order_stack.py | 14 +++- .../order_stacks/instrument_order_stack.py | 11 ++- sysexecution/order_stacks/order_stack.py | 47 ++++++------ sysexecution/orders/base_orders.py | 8 ++ sysexecution/orders/broker_orders.py | 18 +++++ sysexecution/orders/contract_orders.py | 17 +++++ sysexecution/orders/instrument_orders.py | 14 ++++ sysexecution/stack_handler/balance_trades.py | 61 +++++++++------ .../stack_handler/cancel_and_modify.py | 23 ++++-- ...eate_broker_orders_from_contract_orders.py | 24 +++--- sysexecution/stack_handler/fills.py | 29 ++++---- sysexecution/stack_handler/roll_orders.py | 8 +- .../spawn_children_from_instrument_orders.py | 54 ++++++++------ .../stack_handler/stackHandlerCore.py | 7 +- .../strategies/classic_buffered_positions.py | 7 +- .../strategies/dynamic_optimised_positions.py | 7 +- .../strategies/strategy_order_handling.py | 35 +++++---- sysinit/futures/seed_price_data_from_IB.py | 23 ++++-- sysobjects/contracts.py | 18 +++-- sysproduction/data/positions.py | 29 ++++---- .../interactive_update_roll_status.py | 1 - sysproduction/update_historical_prices.py | 1 + sysproduction/update_sampled_contracts.py | 38 ++++++---- systems/stage.py | 1 + 50 files changed, 668 insertions(+), 376 deletions(-) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index a9082047da..c26251e408 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -44,10 +44,6 @@ def broker_get_futures_contract_list( ) -> list: ## Returns list of contract date strings YYYYMMDD - specific_log = self.log.setup( - instrument_code=futures_instrument_with_ib_data.instrument_code - ) - ibcontract_pattern = ib_futures_instrument(futures_instrument_with_ib_data) contract_list = self.ib_get_contract_chain( ibcontract_pattern, allow_expired=allow_expired @@ -72,9 +68,11 @@ def broker_get_single_contract_expiry_date( :param futures_contract_with_ib_data: contract where instrument has ib metadata :return: YYYYMMDD str """ - specific_log = futures_contract_with_ib_data.specific_log(self.log) + log_attrs = {**futures_contract_with_ib_data.log_attributes(), "method": "temp"} if futures_contract_with_ib_data.is_spread_contract(): - specific_log.warning("Can only find expiry for single leg contract!") + self.log.warning( + "Can only find expiry for single leg contract!", **log_attrs + ) raise missingContract try: @@ -84,7 +82,7 @@ def broker_get_single_contract_expiry_date( always_return_single_leg=True, ) except missingContract: - specific_log.warning("Contract is missing can't get expiry") + self.log.warning("Contract is missing can't get expiry", **log_attrs) raise missingContract expiry_date = ibcontract.lastTradeDateOrContractMonth @@ -103,16 +101,16 @@ def _ib_get_uncached_trading_hours( self, contract_object_with_ib_data: futuresContract ) -> listOfTradingHours: - specific_log = contract_object_with_ib_data.specific_log(self.log) - try: trading_hours_from_ib = self.ib_get_trading_hours_from_IB( contract_object_with_ib_data ) except Exception as e: - specific_log.warning( + self.log.warning( "%s when getting trading hours from %s!" - % (str(e), str(contract_object_with_ib_data)) + % (str(e), str(contract_object_with_ib_data)), + **contract_object_with_ib_data.log_attributes(), + method="temp", ) raise missingData @@ -136,7 +134,6 @@ def _ib_get_uncached_trading_hours( def ib_get_trading_hours_from_IB( self, contract_object_with_ib_data: futuresContract ) -> listOfTradingHours: - specific_log = contract_object_with_ib_data.specific_log(self.log) try: ib_contract_details = self.ib_get_contract_details( @@ -146,9 +143,11 @@ def ib_get_trading_hours_from_IB( ib_contract_details ) except Exception as e: - specific_log.warning( + self.log.warning( "%s when getting trading hours from %s!" - % (str(e), str(contract_object_with_ib_data)) + % (str(e), str(contract_object_with_ib_data)), + **contract_object_with_ib_data.log_attributes(), + method="temp", ) raise missingData @@ -214,15 +213,16 @@ def ib_get_saved_weekly_trading_hours_custom_for_contract( def ib_get_saved_weekly_trading_hours_for_timezone_of_contract( self, contract_object_with_ib_data: futuresContract ) -> weekdayDictOfListOfTradingHoursAnyDay: - specific_log = contract_object_with_ib_data.log(self.log) + log_attrs = {**contract_object_with_ib_data.log_attributes(), "method": "temp"} try: time_zone_id = self.ib_get_timezoneid(contract_object_with_ib_data) except missingData: # problem getting timezoneid - specific_log.warning( + self.log.warning( "No time zone ID, can't get trading hours for timezone for %s" - % str(contract_object_with_ib_data) + % str(contract_object_with_ib_data), + **log_attrs, ) raise missingData @@ -235,22 +235,24 @@ def ib_get_saved_weekly_trading_hours_for_timezone_of_contract( "Check ib_config_trading_hours in sysbrokers/IB or private directory, hours for timezone %s not found!" % time_zone_id ) - specific_log.log.critical(error_msg) + # TODO check this double log + self.log.log.critical(error_msg, **log_attrs) raise missingData return weekly_hours_for_timezone def ib_get_timezoneid(self, contract_object_with_ib_data: futuresContract) -> str: - specific_log = contract_object_with_ib_data.specific_log(self.log) try: ib_contract_details = self.ib_get_contract_details( contract_object_with_ib_data ) time_zone_id = ib_contract_details.timeZoneId except Exception as e: - specific_log.warning( + self.log.warning( "%s when getting time zone from %s!" - % (str(e), str(contract_object_with_ib_data)) + % (str(e), str(contract_object_with_ib_data)), + **contract_object_with_ib_data.log_attributes(), + method="temp", ) raise missingData @@ -273,13 +275,16 @@ def _get_all_saved_weekly_trading_hours_from_file(self): def ib_get_min_tick_size( self, contract_object_with_ib_data: futuresContract ) -> float: - specific_log = contract_object_with_ib_data.specific_log(self.log) + log_attrs = {**contract_object_with_ib_data.log_attributes(), "method": "temp"} try: ib_contract = self.ib_futures_contract( contract_object_with_ib_data, always_return_single_leg=True ) except missingContract: - specific_log.warning("Can't get tick size as contract missing") + self.log.warning( + "Can't get tick size as contract missing", + **log_attrs, + ) raise ib_contract_details = self.ib.reqContractDetails(ib_contract)[0] @@ -287,9 +292,10 @@ def ib_get_min_tick_size( try: min_tick = ib_contract_details.minTick except Exception as e: - specific_log.warning( + self.log.warning( "%s when getting min tick size from %s!" - % (str(e), str(ib_contract_details)) + % (str(e), str(ib_contract_details)), + log_attrs, ) raise missingContract @@ -298,13 +304,15 @@ def ib_get_min_tick_size( def ib_get_price_magnifier( self, contract_object_with_ib_data: futuresContract ) -> float: - specific_log = contract_object_with_ib_data.specific_log(self.log) + log_attrs = {**contract_object_with_ib_data.log_attributes(), "method": "temp"} try: ib_contract = self.ib_futures_contract( contract_object_with_ib_data, always_return_single_leg=True ) except missingContract: - specific_log.warning("Can't get price magnifier as contract missing") + self.log.warning( + "Can't get price magnifier as contract missing", **log_attrs + ) raise ib_contract_details = self.ib.reqContractDetails(ib_contract)[0] @@ -312,22 +320,26 @@ def ib_get_price_magnifier( try: price_magnifier = ib_contract_details.priceMagnifier except Exception as e: - specific_log.warning( + self.log.warning( "%s when getting price magnifier from %s!" - % (str(e), str(ib_contract_details)) + % (str(e), str(ib_contract_details)), + **log_attrs, ) raise missingContract return price_magnifier def ib_get_contract_details(self, contract_object_with_ib_data: futuresContract): - specific_log = contract_object_with_ib_data.specific_log(self.log) try: ib_contract = self.ib_futures_contract( contract_object_with_ib_data, always_return_single_leg=True ) except missingContract: - specific_log.warning("Can't get trading hours as contract is missing") + self.log.warning( + "Can't get trading hours as contract is missing", + contract_object_with_ib_data.log_attributes(), + method="temp", + ) raise # returns a list but should only have one element diff --git a/sysbrokers/IB/client/ib_fx_client.py b/sysbrokers/IB/client/ib_fx_client.py index 4faf02da55..105439fda8 100644 --- a/sysbrokers/IB/client/ib_fx_client.py +++ b/sysbrokers/IB/client/ib_fx_client.py @@ -77,6 +77,7 @@ def broker_get_daily_fx_data( """ ccy_code = ccy1 + ccy2 + # TODO log.setup log = self.log.setup(currency_code=ccy_code) try: diff --git a/sysbrokers/IB/client/ib_price_client.py b/sysbrokers/IB/client/ib_price_client.py index 8de70e23b1..45a4cb5580 100644 --- a/sysbrokers/IB/client/ib_price_client.py +++ b/sysbrokers/IB/client/ib_price_client.py @@ -49,7 +49,7 @@ def broker_get_historical_futures_data_for_contract( :param freq: str; one of D, H, 5M, M, 10S, S :return: futuresContractPriceData """ - + # TODO specific_log specific_log = contract_object_with_ib_broker_config.specific_log(self.log) try: @@ -93,17 +93,17 @@ def get_ib_ticker_object( trade_list_for_multiple_legs: tradeQuantity = None, ) -> "ib.ticker": - specific_log = contract_object_with_ib_data.specific_log(self.log) - try: ibcontract = self.ib_futures_contract( contract_object_with_ib_data, trade_list_for_multiple_legs=trade_list_for_multiple_legs, ) except missingContract: - specific_log.warning( + self.log.warning( "Can't find matching IB contract for %s" - % str(contract_object_with_ib_data) + % str(contract_object_with_ib_data), + **contract_object_with_ib_data.log_attributes(), + method="temp", ) raise @@ -124,17 +124,17 @@ def cancel_market_data_for_contract_and_trade_qty( trade_list_for_multiple_legs: tradeQuantity = None, ): - specific_log = contract_object_with_ib_data.specific_log(self.log) - try: ibcontract = self.ib_futures_contract( contract_object_with_ib_data, trade_list_for_multiple_legs=trade_list_for_multiple_legs, ) except missingContract: - specific_log.warning( + self.log.warning( "Can't find matching IB contract for %s" - % str(contract_object_with_ib_data) + % str(contract_object_with_ib_data), + **contract_object_with_ib_data.log_attributes(), + method="temp", ) raise @@ -151,21 +151,19 @@ def _ib_get_recent_bid_ask_tick_data_using_reqHistoricalTicks( :param contract_object_with_ib_data: :return: """ - specific_log = self.log.setup( - instrument_code=contract_object_with_ib_data.instrument_code, - contract_date=contract_object_with_ib_data.date_str, - ) + log_attrs = {**contract_object_with_ib_data.log_attributes(), "method": "temp"} if contract_object_with_ib_data.is_spread_contract(): error_msg = "Can't get historical data for combo" - specific_log.critical(error_msg) + self.log.critical(error_msg, **log_attrs) raise Exception(error_msg) try: ibcontract = self.ib_futures_contract(contract_object_with_ib_data) except missingContract: - specific_log.warning( + self.log.warning( "Can't find matching IB contract for %s" - % str(contract_object_with_ib_data) + % str(contract_object_with_ib_data), + **log_attrs, ) raise @@ -178,7 +176,7 @@ def _ib_get_recent_bid_ask_tick_data_using_reqHistoricalTicks( return tick_data - def _get_generic_data_for_contract( + def _get_generic_data_for_contract( # TODO passed logger instance self, ibcontract: ibContract, log: pst_logger = None, diff --git a/sysbrokers/IB/config/ib_instrument_config.py b/sysbrokers/IB/config/ib_instrument_config.py index d196450628..ee35650f27 100644 --- a/sysbrokers/IB/config/ib_instrument_config.py +++ b/sysbrokers/IB/config/ib_instrument_config.py @@ -36,15 +36,16 @@ def get_instrument_object_from_config( instrument_code: str, config: IBconfig = None, log: pst_logger = get_logger("") ) -> futuresInstrumentWithIBConfigData: - new_log = log.setup(instrument_code=instrument_code) + log_attrs = {INSTRUMENT_CODE_LOG_LABEL: instrument_code, "method": "temp"} if config is None: try: config = read_ib_config_from_file() except missingFile as e: - new_log.warning( + log.warning( "Can't get config for instrument %s as IB configuration file missing" - % instrument_code + % instrument_code, + **log_attrs, ) raise missingInstrument from e @@ -52,8 +53,9 @@ def get_instrument_object_from_config( try: assert instrument_code in list_of_instruments except: - new_log.warning( - "Instrument %s is not in IB configuration file" % instrument_code + log.warning( + "Instrument %s is not in IB configuration file" % instrument_code, + **log_attrs, ) raise missingInstrument diff --git a/sysbrokers/IB/ib_Fx_prices_data.py b/sysbrokers/IB/ib_Fx_prices_data.py index 7da66b699c..f252faf1c4 100644 --- a/sysbrokers/IB/ib_Fx_prices_data.py +++ b/sysbrokers/IB/ib_Fx_prices_data.py @@ -52,8 +52,11 @@ def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: try: ib_config_for_code = self._get_config_info_for_code(currency_code) except missingInstrument: - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - log.warning("Can't get prices as missing IB config for %s" % currency_code) + self.log.warning( + "Can't get prices as missing IB config for %s" % currency_code, + CURRENCY_CODE_LOG_LABEL=currency_code, + method="temp", + ) return fxPrices.create_empty() data = self._get_fx_prices_with_ib_config(currency_code, ib_config_for_code) @@ -65,12 +68,16 @@ def _get_fx_prices_with_ib_config( ) -> fxPrices: raw_fx_prices_as_series = self._get_raw_fx_prices(ib_config_for_code) - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) + log_attrs = { + CURRENCY_CODE_LOG_LABEL: currency_code, + "method": "temp", + } if len(raw_fx_prices_as_series) == 0: - log.warning( + self.log.warning( "No available IB prices for %s %s" - % (currency_code, str(ib_config_for_code)) + % (currency_code, str(ib_config_for_code)), + **log_attrs, ) return fxPrices.create_empty() @@ -82,7 +89,10 @@ def _get_fx_prices_with_ib_config( # turn into a fxPrices fx_prices = fxPrices(raw_fx_prices) - log.debug("Downloaded %d prices" % len(fx_prices)) + self.log.debug( + "Downloaded %d prices" % len(fx_prices), + **log_attrs, + ) return fx_prices @@ -103,9 +113,10 @@ def _get_config_info_for_code(self, currency_code: str) -> ibFXConfig: try: config_data = self._get_ib_fx_config() except missingFile as e: - new_log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - new_log.warning( - "Can't get IB FX config for %s as config file missing" % currency_code + self.log.warning( + "Can't get IB FX config for %s as config file missing" % currency_code, + CURRENCY_CODE_LOG_LABEL=currency_code, + method="temp", ) raise missingInstrument from e diff --git a/sysbrokers/IB/ib_futures_contract_price_data.py b/sysbrokers/IB/ib_futures_contract_price_data.py index ff9ba6de61..dabefa80bd 100644 --- a/sysbrokers/IB/ib_futures_contract_price_data.py +++ b/sysbrokers/IB/ib_futures_contract_price_data.py @@ -201,7 +201,6 @@ def _get_prices_at_frequency_for_contract_object_no_checking_with_expiry_flag( :param frequency: str; one of D, H, 15M, 5M, M, 10S, S :return: data """ - new_log = futures_contract_object.log(self.log) try: contract_object_with_ib_broker_config = ( @@ -210,7 +209,11 @@ def _get_prices_at_frequency_for_contract_object_no_checking_with_expiry_flag( ) ) except missingContract: - new_log.warning("Can't get data for %s" % str(futures_contract_object)) + self.log.warning( + "Can't get data for %s" % str(futures_contract_object), + **futures_contract_object.log_attributes(), + method="temp", + ) raise missingData price_data = self._get_prices_at_frequency_for_ibcontract_object_no_checking( @@ -275,7 +278,6 @@ def get_ticker_object_for_contract_and_trade_qty( futures_contract: futuresContract, trade_list_for_multiple_legs: tradeQuantity = None, ) -> tickerObject: - new_log = futures_contract.specific_log(self.log) try: contract_object_with_ib_data = ( @@ -284,7 +286,11 @@ def get_ticker_object_for_contract_and_trade_qty( ) ) except missingContract as e: - new_log.warning("Can't get data for %s" % str(futures_contract)) + self.log.warning( + "Can't get data for %s" % str(futures_contract), + **futures_contract.log_attributes(), + method="temp", + ) raise e ticker_with_bs = self.ib_client.get_ticker_object_with_BS( @@ -297,13 +303,16 @@ def get_ticker_object_for_contract_and_trade_qty( return ticker_object def cancel_market_data_for_contract(self, contract: futuresContract): - new_log = contract.specific_log(self.log) try: contract_object_with_ib_data = ( self.futures_contract_data.get_contract_object_with_IB_data(contract) ) except missingContract: - new_log.warning("Can't get data for %s" % str(contract)) + self.log.warning( + "Can't get data for %s" % str(contract), + **contract.log_attributes(), + method="temp", + ) return futuresContractPrices.create_empty() self.ib_client.cancel_market_data_for_contract(contract_object_with_ib_data) @@ -312,8 +321,6 @@ def cancel_market_data_for_order(self, order: ibBrokerOrder): contract_object = order.futures_contract trade_list_for_multiple_legs = order.trade - new_log = order.log_with_attributes(self.log) - try: contract_object_with_ib_data = ( self.futures_contract_data.get_contract_object_with_IB_data( @@ -321,7 +328,11 @@ def cancel_market_data_for_order(self, order: ibBrokerOrder): ) ) except missingContract: - new_log.warning("Can't get data for %s" % str(contract_object)) + self.log.warning( + "Can't get data for %s" % str(contract_object), + **order.log_attributes(), + method="temp", + ) return futuresContractPrices.create_empty() self.ib_client.cancel_market_data_for_contract_and_trade_qty( diff --git a/sysbrokers/IB/ib_futures_contracts_data.py b/sysbrokers/IB/ib_futures_contracts_data.py index a1a5a09a51..57cbc72b23 100644 --- a/sysbrokers/IB/ib_futures_contracts_data.py +++ b/sysbrokers/IB/ib_futures_contracts_data.py @@ -98,9 +98,12 @@ def get_actual_expiry_date_for_single_contract( :param futures_contract: type futuresContract :return: YYYYMMDD or None """ - log = futures_contract.specific_log(self.log) if futures_contract.is_spread_contract(): - log.warning("Can't find expiry for multiple leg contract here") + self.log.warning( + "Can't find expiry for multiple leg contract here", + **futures_contract.log_attributes(), + method="temp", + ) raise missingContract contract_object_with_ib_data = self.get_contract_object_with_IB_data( @@ -114,9 +117,12 @@ def get_actual_expiry_date_for_single_contract( def _get_actual_expiry_date_given_single_contract_with_ib_metadata( self, futures_contract_with_ib_data: futuresContract, allow_expired=False ) -> expiryDate: - log = futures_contract_with_ib_data.specific_log(self.log) if futures_contract_with_ib_data.is_spread_contract(): - log.warning("Can't find expiry for multiple leg contract here") + self.log.warning( + "Can't find expiry for multiple leg contract here", + **futures_contract_with_ib_data.log_attributes(), + method="temp", + ) raise missingContract expiry_date = self.ib_client.broker_get_single_contract_expiry_date( @@ -158,13 +164,15 @@ def _get_futures_instrument_object_with_IB_data( ) def get_min_tick_size_for_contract(self, contract_object: futuresContract) -> float: - new_log = contract_object.log(self.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} try: contract_object_with_ib_data = self.get_contract_object_with_IB_data( contract_object ) except missingContract: - new_log.debug("Can't resolve contract so can't find tick size") + self.log.debug( + "Can't resolve contract so can't find tick size", **log_attrs + ) raise try: @@ -172,7 +180,7 @@ def get_min_tick_size_for_contract(self, contract_object: futuresContract) -> fl contract_object_with_ib_data ) except missingContract: - new_log.debug("No tick size found") + self.log.debug("No tick size found", **log_attrs) raise return min_tick_size @@ -180,13 +188,15 @@ def get_min_tick_size_for_contract(self, contract_object: futuresContract) -> fl def get_price_magnifier_for_contract( self, contract_object: futuresContract ) -> float: - new_log = contract_object.log(self.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} try: contract_object_with_ib_data = self.get_contract_object_with_IB_data( contract_object ) except missingContract: - new_log.debug("Can't resolve contract so can't find tick size") + self.log.debug( + "Can't resolve contract so can't find tick size", **log_attrs + ) raise try: @@ -194,7 +204,7 @@ def get_price_magnifier_for_contract( contract_object_with_ib_data ) except missingContract: - new_log.debug("No contract found") + self.log.debug("No contract found", **log_attrs) raise return price_magnifier @@ -207,14 +217,14 @@ def get_trading_hours_for_contract( :param futures_contract: :return: list of paired date times """ - new_log = futures_contract.log(self.log) + log_attrs = {**futures_contract.log_attributes(), "method": "temp"} try: contract_object_with_ib_data = self.get_contract_object_with_IB_data( futures_contract ) except missingContract: - new_log.debug("Can't resolve contract") + self.log.debug("Can't resolve contract", **log_attrs) raise missingContract try: @@ -222,7 +232,7 @@ def get_trading_hours_for_contract( contract_object_with_ib_data ) except missingData: - new_log.debug("No trading hours found") + self.log.debug("No trading hours found", **log_attrs) trading_hours = listOfTradingHours([]) return trading_hours diff --git a/sysbrokers/IB/ib_orders.py b/sysbrokers/IB/ib_orders.py index c97586e977..1435ecb7b1 100644 --- a/sysbrokers/IB/ib_orders.py +++ b/sysbrokers/IB/ib_orders.py @@ -297,8 +297,10 @@ def _send_broker_order_to_IB(self, broker_order: brokerOrder) -> tradeWithContra """ - log = broker_order.log_with_attributes(self.log) - log.debug("Going to submit order %s to IB" % str(broker_order)) + log_attrs = {**broker_order.log_attributes(), "method": "temp"} + self.log.debug( + "Going to submit order %s to IB" % str(broker_order), **log_attrs + ) trade_list = broker_order.trade order_type = broker_order.order_type @@ -318,10 +320,10 @@ def _send_broker_order_to_IB(self, broker_order: brokerOrder) -> tradeWithContra limit_price=limit_price, ) if placed_broker_trade_object is missing_order: - log.warning("Couldn't submit order") + self.log.warning("Couldn't submit order", **log_attrs) return missing_order - log.debug("Order submitted to IB") + self.log.debug("Order submitted to IB", **log_attrs) return placed_broker_trade_object @@ -377,16 +379,16 @@ def match_db_broker_order_to_control_order_from_brokers( def cancel_order_on_stack(self, broker_order: brokerOrder): - log = broker_order.log_with_attributes(self.log) + log_attrs = {**broker_order.log_attributes(), "method": "temp"} matched_control_order = ( self.match_db_broker_order_to_control_order_from_brokers(broker_order) ) if matched_control_order is missing_order: - log.warning("Couldn't cancel non existent order") + self.log.warning("Couldn't cancel non existent order", **log_attrs) return None self.cancel_order_given_control_object(matched_control_order) - log.debug("Sent cancellation for %s" % str(broker_order)) + self.log.debug("Sent cancellation for %s" % str(broker_order), **log_attrs) def cancel_order_given_control_object( self, broker_orders_with_controls: ibOrderWithControls diff --git a/sysbrokers/broker_futures_contract_data.py b/sysbrokers/broker_futures_contract_data.py index 9e46726ba7..a927d40ada 100644 --- a/sysbrokers/broker_futures_contract_data.py +++ b/sysbrokers/broker_futures_contract_data.py @@ -22,13 +22,15 @@ def get_min_tick_size_for_contract(self, contract_object: futuresContract) -> fl raise NotImplementedError def is_contract_okay_to_trade(self, futures_contract: futuresContract) -> bool: - new_log = futures_contract.log(self.log) try: trading_hours = self.get_trading_hours_for_contract(futures_contract) except missingContract: - new_log.critical( - "Error! Cannot find active contract! Expired? interactive_update_roll_status.py not executed?" + self.log.critical( + "Error! Cannot find active contract! Expired? " + "interactive_update_roll_status.py not executed?", + **futures_contract.log_attributes(), + method="temp", ) return False diff --git a/syscontrol/run_process.py b/syscontrol/run_process.py index 409ae1085f..b8008d0236 100644 --- a/syscontrol/run_process.py +++ b/syscontrol/run_process.py @@ -70,6 +70,7 @@ def list_of_timer_functions(self) -> listOfTimerFunctions: return self._list_of_timer_functions def _setup(self): + # TODO log.setup self.data.log.setup(type=self.process_name) self._log = self.data.log data_control = dataControlProcess(self.data) diff --git a/syscontrol/timer_functions.py b/syscontrol/timer_functions.py index dc289cefd4..41a889207b 100644 --- a/syscontrol/timer_functions.py +++ b/syscontrol/timer_functions.py @@ -27,6 +27,7 @@ def __init__( self._data = data self._parameters = parameters + # TODO log.setup log.setup(type=self.process_name) self._log = log self._report_status = reportStatus(log) diff --git a/sysdata/arctic/arctic_futures_per_contract_prices.py b/sysdata/arctic/arctic_futures_per_contract_prices.py index ccdf538323..f11d9d75b7 100644 --- a/sysdata/arctic/arctic_futures_per_contract_prices.py +++ b/sysdata/arctic/arctic_futures_per_contract_prices.py @@ -94,7 +94,6 @@ def _write_prices_at_frequency_for_contract_object_no_checking( frequency: Frequency, ): - log = futures_contract_object.log(self.log) ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) @@ -102,14 +101,16 @@ def _write_prices_at_frequency_for_contract_object_no_checking( self.arctic_connection.write(ident, futures_price_data_as_pd) - log.debug( + self.log.debug( "Wrote %s lines of prices for %s at %s to %s" % ( len(futures_price_data), str(futures_contract_object.key), str(frequency), str(self), - ) + ), + **futures_contract_object.log_attributes(), + method="temp", ) def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: @@ -190,15 +191,15 @@ def _delete_merged_prices_for_contract_object_with_no_checks_be_careful( def _delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( self, futures_contract_object: futuresContract, frequency: Frequency ): - log = futures_contract_object.log(self.log) - ident = from_contract_and_freq_to_key( contract=futures_contract_object, frequency=frequency ) self.arctic_connection.delete(ident) - log.debug( + self.log.debug( "Deleted all prices for %s from %s" - % (futures_contract_object.key, str(self)) + % (futures_contract_object.key, str(self)), + **futures_contract_object.log_attributes(), + method="temp", ) diff --git a/sysdata/arctic/arctic_spotfx_prices.py b/sysdata/arctic/arctic_spotfx_prices.py index 5badf78342..4eb56b730d 100644 --- a/sysdata/arctic/arctic_spotfx_prices.py +++ b/sysdata/arctic/arctic_spotfx_prices.py @@ -36,21 +36,24 @@ def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: return fx_prices def _delete_fx_prices_without_any_warning_be_careful(self, currency_code: str): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) self.arctic.delete(currency_code) - log.debug("Deleted fX prices for %s from %s" % (currency_code, str(self))) + self.log.debug( + "Deleted fX prices for %s from %s" % (currency_code, str(self)), + CURRENCY_CODE_LOG_LABEL=currency_code, + method="temp", + ) def _add_fx_prices_without_checking_for_existing_entry( self, currency_code: str, fx_price_data: fxPrices ): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - fx_price_data_aspd = pd.DataFrame(fx_price_data) fx_price_data_aspd.columns = ["price"] fx_price_data_aspd = fx_price_data_aspd.astype(float) self.arctic.write(currency_code, fx_price_data_aspd) - log.debug( + self.log.debug( "Wrote %s lines of prices for %s to %s" - % (len(fx_price_data), currency_code, str(self)) + % (len(fx_price_data), currency_code, str(self)), + CURRENCY_CODE_LOG_LABEL=currency_code, + method="temp", ) diff --git a/sysdata/config/configdata.py b/sysdata/config/configdata.py index 8a6849dcf2..934d0720ca 100644 --- a/sysdata/config/configdata.py +++ b/sysdata/config/configdata.py @@ -190,6 +190,7 @@ def system_init(self, base_system): """ # inherit the log + # TODO log.setup setattr(self, "log", base_system.log.setup(stage="config")) # fill with defaults diff --git a/sysdata/csv/csv_futures_contract_prices.py b/sysdata/csv/csv_futures_contract_prices.py index 2d48a056e1..7c951abb07 100644 --- a/sysdata/csv/csv_futures_contract_prices.py +++ b/sysdata/csv/csv_futures_contract_prices.py @@ -91,8 +91,11 @@ def _get_prices_at_frequency_for_contract_object_no_checking( skipfooter=skipfooter, ) except OSError: - log = futures_contract_object.log(self.log) - log.warning("Can't find adjusted price file %s" % filename) + self.log.warning( + "Can't find adjusted price file %s" % filename, + **futures_contract_object.log_attributes(), + method="temp", + ) return futuresContractPrices.create_empty() instrpricedata = instrpricedata.groupby(level=0).last() diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index 7c11be2943..efdc0db9df 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -208,6 +208,7 @@ def csv_data_paths(self) -> dict: def _get_specific_logger(self, class_object): class_name = get_class_name(class_object) + # TODO log.setup log = self.log.setup(**{COMPONENT_LOG_LABEL: class_name}) return log diff --git a/sysdata/futures/contracts.py b/sysdata/futures/contracts.py index 8cebc8ccd7..77cec6ca5f 100644 --- a/sysdata/futures/contracts.py +++ b/sysdata/futures/contracts.py @@ -45,21 +45,28 @@ def get_contract_object( def delete_contract_data( self, instrument_code: str, contract_date: str, are_you_sure=False ): - - log = self.log.setup( - instrument_code=instrument_code, contract_date=contract_date - ) + log_attrs = { + INSTRUMENT_CODE_LOG_LABEL: instrument_code, + CONTRACT_DATE_LOG_LABEL: contract_date, + "method": "temp", + } if are_you_sure: if self.is_contract_in_data(instrument_code, contract_date): self._delete_contract_data_without_any_warning_be_careful( instrument_code, contract_date ) - log.info("Deleted contract %s/%s" % (instrument_code, contract_date)) + self.log.info( + "Deleted contract %s/%s" % (instrument_code, contract_date), + **log_attrs, + ) else: # doesn't exist anyway - log.warning("Tried to delete non existent contract") + self.log.warning("Tried to delete non existent contract", **log_attrs) else: - log.error("You need to call delete_contract_data with a flag to be sure") + self.log.error( + "You need to call delete_contract_data with a flag to be sure", + **log_attrs, + ) def delete_all_contracts_for_instrument( self, instrument_code: str, areyoureallysure=False @@ -82,20 +89,23 @@ def add_contract_data( instrument_code = contract_object.instrument_code contract_date = contract_object.date_str - log = contract_object.log(self.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} if self.is_contract_in_data(instrument_code, contract_date): if ignore_duplication: pass else: - log.warning( + self.log.warning( "There is already %s in the data, you have to delete it first" - % (contract_object.key) + % (contract_object.key), + **log_attrs, ) return None self._add_contract_object_without_checking_for_existing_entry(contract_object) - log.info("Added contract %s %s" % (instrument_code, contract_date)) + self.log.info( + "Added contract %s %s" % (instrument_code, contract_date), **log_attrs + ) def get_list_of_contract_dates_for_instrument_code( self, instrument_code: str, allow_expired: bool = False diff --git a/sysdata/futures/futures_per_contract_prices.py b/sysdata/futures/futures_per_contract_prices.py index 828d94c5ad..fa8df20bc4 100644 --- a/sysdata/futures/futures_per_contract_prices.py +++ b/sysdata/futures/futures_per_contract_prices.py @@ -299,10 +299,11 @@ def write_merged_prices_for_contract_object( not_ignoring_duplication = not ignore_duplication if not_ignoring_duplication: if self.has_merged_price_data_for_contract(futures_contract_object): - log = futures_contract_object.log(self.log) - log.warning( + self.log.warning( "There is already existing data for %s" - % futures_contract_object.key + % futures_contract_object.key, + **futures_contract_object.log_attributes(), + method="temp", ) return None @@ -330,10 +331,11 @@ def write_prices_at_frequency_for_contract_object( if self.has_price_data_for_contract_at_frequency( contract_object=futures_contract_object, frequency=frequency ): - log = futures_contract_object.log(self.log) - log.warning( + self.log.warning( "There is already existing data for %s" - % futures_contract_object.key + % futures_contract_object.key, + **futures_contract_object.log_attributes(), + method="temp", ) return None @@ -352,10 +354,10 @@ def update_prices_at_frequency_for_contract( max_price_spike: float = VERY_BIG_NUMBER, ) -> int: - new_log = contract_object.log(self.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} if len(new_futures_per_contract_prices) == 0: - new_log.debug("No new data") + self.log.debug("No new data", **log_attrs) return 0 if frequency is MIXED_FREQ: @@ -372,8 +374,9 @@ def update_prices_at_frequency_for_contract( ) if merged_prices is SPIKE_IN_DATA: - new_log.debug( - "Price has moved too much - will need to manually check - no price update done" + self.log.debug( + "Price has moved too much - will need to manually check - no price update done", + **log_attrs, ) return SPIKE_IN_DATA @@ -381,16 +384,17 @@ def update_prices_at_frequency_for_contract( rows_added = len(merged_prices) - len(old_prices) if rows_added < 0: - new_log.critical("Can't remove prices something gone wrong!") + self.log.critical("Can't remove prices something gone wrong!", **log_attrs) raise mergeError("Merged prices have fewer rows than old prices!") elif rows_added == 0: if len(old_prices) == 0: - new_log.debug("No existing or additional data") + self.log.debug("No existing or additional data", **log_attrs) return 0 else: - new_log.debug( - "No additional data since %s " % str(old_prices.index[-1]) + self.log.debug( + "No additional data since %s " % str(old_prices.index[-1]), + **log_attrs, ) return 0 @@ -407,7 +411,7 @@ def update_prices_at_frequency_for_contract( ignore_duplication=True, ) - new_log.debug("Added %d additional rows of data" % rows_added) + self.log.debug("Added %d additional rows of data" % rows_added) return rows_added @@ -428,8 +432,11 @@ def delete_merged_prices_for_contract_object( futures_contract_object ) else: - log = futures_contract_object.log(self.log) - log.warning("Tried to delete non existent contract") + self.log.warning( + "Tried to delete non existent contract", + **futures_contract_object.log_attributes(), + method="temp", + ) def delete_prices_at_frequency_for_contract_object( self, @@ -453,9 +460,10 @@ def delete_prices_at_frequency_for_contract_object( futures_contract_object=futures_contract_object, frequency=frequency ) else: - log = futures_contract_object.log(self.log) - log.warning( - "Tried to delete non existent contract at frequency %s" % frequency + self.log.warning( + "Tried to delete non existent contract at frequency %s" % frequency, + **futures_contract_object.log_attributes(), + method="temp", ) def delete_merged_prices_for_instrument_code( diff --git a/sysdata/futures/multiple_prices.py b/sysdata/futures/multiple_prices.py index edd3c00dec..95c6a9dca7 100644 --- a/sysdata/futures/multiple_prices.py +++ b/sysdata/futures/multiple_prices.py @@ -11,6 +11,7 @@ """ from syscore.exceptions import existingData from sysdata.base_data import baseData +from syslogging.logger import * # These are used when inferring prices in an incomplete series from sysobjects.multiple_prices import futuresMultiplePrices @@ -46,23 +47,30 @@ def get_multiple_prices(self, instrument_code: str) -> futuresMultiplePrices: return multiple_prices def delete_multiple_prices(self, instrument_code: str, are_you_sure=False): - log = self.log.setup(instrument_code=instrument_code) + log_attrs = {INSTRUMENT_CODE_LOG_LABEL: instrument_code, "method": "temp"} if are_you_sure: if self.is_code_in_data(instrument_code): self._delete_multiple_prices_without_any_warning_be_careful( instrument_code ) - log.info("Deleted multiple price data for %s" % instrument_code) + self.log.info( + "Deleted multiple price data for %s" % instrument_code, + **log_attrs, + ) else: # doesn't exist anyway - log.warning( + self.log.warning( "Tried to delete non existent multiple prices for %s" - % instrument_code + % instrument_code, + **log_attrs, ) else: - log.error("You need to call delete_multiple_prices with a flag to be sure") + self.log.error( + "You need to call delete_multiple_prices with a flag to be sure", + **log_attrs, + ) raise Exception("You need to be sure!") def is_code_in_data(self, instrument_code: str) -> bool: @@ -77,14 +85,15 @@ def add_multiple_prices( multiple_price_data: futuresMultiplePrices, ignore_duplication=False, ): - log = self.log.setup(instrument_code=instrument_code) + log_attrs = {INSTRUMENT_CODE_LOG_LABEL: instrument_code, "method": "temp"} if self.is_code_in_data(instrument_code): if ignore_duplication: pass else: - log.error( + self.log.error( "There is already %s in the data, you have to delete it first" - % instrument_code + % instrument_code, + **log_attrs, ) raise existingData @@ -92,7 +101,7 @@ def add_multiple_prices( instrument_code, multiple_price_data ) - log.info("Added data for instrument %s" % instrument_code) + self.log.info("Added data for instrument %s" % instrument_code, **log_attrs) def _add_multiple_prices_without_checking_for_existing_entry( self, instrument_code: str, multiple_price_data: futuresMultiplePrices diff --git a/sysdata/fx/spotfx.py b/sysdata/fx/spotfx.py index e8ad0f7ed0..d949dddc79 100644 --- a/sysdata/fx/spotfx.py +++ b/sysdata/fx/spotfx.py @@ -83,10 +83,11 @@ def _get_fx_prices_for_inversion(self, fx_code: str) -> fxPrices: raw_fx_data = self._get_fx_prices_vs_default(currency2) if raw_fx_data.empty: - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: fx_code}) - log.warning( + self.log.warning( "Data for %s is missing, needed to calculate %s" - % (currency2 + DEFAULT_CURRENCY, DEFAULT_CURRENCY + currency2) + % (currency2 + DEFAULT_CURRENCY, DEFAULT_CURRENCY + currency2), + CURRENCY_CODE_LOG_LABEL=fx_code, + method="temp", ) return raw_fx_data @@ -129,8 +130,11 @@ def _get_fx_prices_vs_default(self, currency1: str) -> fxPrices: def _get_fx_prices(self, code: str) -> fxPrices: if not self.is_code_in_data(code): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: code}) - log.warning("Currency %s is missing from list of FX data" % code) + self.log.warning( + "Currency %s is missing from list of FX data" % code, + CURRENCY_CODE_LOG_LABEL=code, + method="temp", + ) return fxPrices.create_empty() @@ -139,18 +143,27 @@ def _get_fx_prices(self, code: str) -> fxPrices: return data def delete_fx_prices(self, code: str, are_you_sure=False): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: code}) + log_attrs = {CURRENCY_CODE_LOG_LABEL: code, "method": "temp"} if are_you_sure: if self.is_code_in_data(code): self._delete_fx_prices_without_any_warning_be_careful(code) - log.info("Deleted fx price data for %s" % code) + self.log.info( + "Deleted fx price data for %s" % code, + **log_attrs, + ) else: # doesn't exist anyway - log.warning("Tried to delete non existent fx prices for %s" % code) + self.log.warning( + "Tried to delete non existent fx prices for %s" % code, + **log_attrs, + ) else: - log.warning("You need to call delete_fx_prices with a flag to be sure") + self.log.warning( + "You need to call delete_fx_prices with a flag to be sure", + **log_attrs, + ) def is_code_in_data(self, code: str) -> bool: if code in self.get_list_of_fxcodes(): @@ -161,19 +174,20 @@ def is_code_in_data(self, code: str) -> bool: def add_fx_prices( self, code: str, fx_price_data: fxPrices, ignore_duplication: bool = False ): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: code}) + log_attrs = {CURRENCY_CODE_LOG_LABEL: code, "method": "temp"} if self.is_code_in_data(code): if ignore_duplication: pass else: - log.warning( - "There is already %s in the data, you have to delete it first, or set ignore_duplication=True, or use update_fx_prices" - % code + self.log.warning( + "There is already %s in the data, you have to delete it first, or " + "set ignore_duplication=True, or use update_fx_prices" % code, + **log_attrs, ) return None self._add_fx_prices_without_checking_for_existing_entry(code, fx_price_data) - log.info("Added fx data for code %s" % code) + self.log.info("Added fx data for code %s" % code, **log_attrs) def update_fx_prices( self, code: str, new_fx_prices: fxPrices, check_for_spike=True @@ -185,7 +199,7 @@ def update_fx_prices( :param new_fx_prices: fxPrices object :return: int, number of rows added """ - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: code}) + log_attrs = {CURRENCY_CODE_LOG_LABEL: code, "method": "temp"} old_fx_prices = self.get_fx_prices(code) merged_fx_prices = old_fx_prices.add_rows_to_existing_data( @@ -199,18 +213,22 @@ def update_fx_prices( if rows_added == 0: if len(old_fx_prices) == 0: - log.debug("No new or old prices for %s" % code) + self.log.debug("No new or old prices for %s" % code, **log_attrs) else: - log.debug( + self.log.debug( "No additional data since %s for %s" - % (str(old_fx_prices.index[-1]), code) + % (str(old_fx_prices.index[-1]), code), + **log_attrs, ) return 0 self.add_fx_prices(code, merged_fx_prices, ignore_duplication=True) - log.debug("Added %d additional rows for %s" % (rows_added, code)) + self.log.debug( + "Added %d additional rows for %s" % (rows_added, code), + **log_attrs, + ) return rows_added diff --git a/sysdata/sim/sim_data.py b/sysdata/sim/sim_data.py index 0855134eff..dc9d7ae4ea 100644 --- a/sysdata/sim/sim_data.py +++ b/sysdata/sim/sim_data.py @@ -71,6 +71,7 @@ def system_init(self, base_system: "System"): """ # inherit the log + # TODO log.setup self._log = base_system.log.setup(stage="data") self._parent = base_system diff --git a/sysexecution/algos/algo.py b/sysexecution/algos/algo.py index 86125e5a01..4bf7624ca9 100644 --- a/sysexecution/algos/algo.py +++ b/sysexecution/algos/algo.py @@ -85,7 +85,6 @@ def get_and_submit_broker_order_for_contract_order( broker_account: str = arg_not_supplied, ): - log = contract_order.log_with_attributes(self.data.log) broker = self.data_broker.get_broker_name() if broker_account is arg_not_supplied: @@ -129,9 +128,11 @@ def get_and_submit_broker_order_for_contract_order( limit_price=limit_price, ) - log.debug( + self.data.log.debug( "Created a broker order %s (not yet submitted or written to local DB)" - % str(broker_order) + % str(broker_order), + **contract_order.log_attributes(), + method="temp", ) placed_broker_order_with_controls = self.data_broker.submit_broker_order( @@ -139,12 +140,17 @@ def get_and_submit_broker_order_for_contract_order( ) if placed_broker_order_with_controls is missing_order: - log.warning("Order could not be submitted") + self.data.log.warning( + "Order could not be submitted", + **contract_order.log_attributes(), + method="temp", + ) return missing_order - log = placed_broker_order_with_controls.order.log_with_attributes(log) - log.debug( - "Submitted order to IB %s" % str(placed_broker_order_with_controls.order) + self.data.log.debug( + "Submitted order to IB %s" % str(placed_broker_order_with_controls.order), + **placed_broker_order_with_controls.order.log_attributes(), + method="temp", ) placed_broker_order_with_controls.add_or_replace_ticker(ticker_object) @@ -158,7 +164,6 @@ def get_market_data_for_order_modifies_ticker_object( # to provide a benchmark for execution purposes # (optionally) to set limit prices ## - log = contract_order.log_with_attributes(self.data.log) # Get the first 'reference' tick try: @@ -168,9 +173,11 @@ def get_market_data_for_order_modifies_ticker_object( ) ) except missingData: - log.warning( + self.data.log.warning( "Can't get market data for %s so not trading with limit order %s" - % (contract_order.instrument_code, str(contract_order)) + % (contract_order.instrument_code, str(contract_order)), + **contract_order.log_attributes(), + method="temp", ) raise @@ -225,10 +232,11 @@ def round_limit_price_to_tick_size( try: min_tick = self.data_broker.get_min_tick_size_for_contract(contract) except missingContract: - log = contract_order.log_with_attributes(self.data.log) - log.warning( + self.data.log.warning( "Couldn't find min tick size for %s, not rounding limit price %f" - % (str(contract), limit_price) + % (str(contract), limit_price), + **contract_order.log_attributes(), + method="temp", ) return limit_price diff --git a/sysexecution/algos/algo_market.py b/sysexecution/algos/algo_market.py index 6197b6ac5c..904a7cb328 100644 --- a/sysexecution/algos/algo_market.py +++ b/sysexecution/algos/algo_market.py @@ -45,10 +45,13 @@ def manage_trade( def prepare_and_submit_trade(self): contract_order = self.contract_order - log = contract_order.log_with_attributes(self.data.log) + log_attrs = {**contract_order.log_attributes(), "method": "temp"} if contract_order.panic_order: - log.debug("PANIC ORDER! DON'T RESIZE AND DO ENTIRE TRADE") + self.data.log.debug( + "PANIC ORDER! DON'T RESIZE AND DO ENTIRE TRADE", + **log_attrs, + ) cut_down_contract_order = copy(contract_order) else: cut_down_contract_order = contract_order.reduce_trade_size_proportionally_so_smallest_leg_is_max_size( @@ -56,9 +59,10 @@ def prepare_and_submit_trade(self): ) if cut_down_contract_order.trade != contract_order.trade: - log.debug( + self.data.log.debug( "Cut down order to size %s from %s because of algo size limit" - % (str(contract_order.trade), str(cut_down_contract_order.trade)) + % (str(contract_order.trade), str(cut_down_contract_order.trade)), + **log_attrs, ) order_type = self.order_type_to_use @@ -77,6 +81,7 @@ def order_type_to_use(self) -> brokerOrderType: def manage_live_trade( self, broker_order_with_controls: orderWithControls ) -> orderWithControls: + # TODO log_with_attributes log = broker_order_with_controls.order.log_with_attributes(self.data.log) data_broker = self.data_broker diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 1b8a02646c..99c4e17a78 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -84,6 +84,7 @@ def prepare_and_submit_trade(self) -> orderWithControls: data = self.data contract_order = self.contract_order + # TODO log_with_attributes log = contract_order.log_with_attributes(data.log) ## check order type is 'best' not 'limit' or 'market' @@ -148,6 +149,7 @@ def manage_live_trade( ) -> orderWithControls: data = self.data + # TODO log_with_attributes log = broker_order_with_controls_and_order_id.order.log_with_attributes( data.log ) @@ -220,7 +222,7 @@ def manage_live_trade( return broker_order_with_controls_and_order_id -def limit_trade_viable( +def limit_trade_viable( # TODO passed logger instance data: dataBlob, order: contractOrder, ticker_object: tickerObject, log: pst_logger ) -> bool: @@ -248,7 +250,7 @@ def limit_trade_viable( no_need_to_switch = "_NO_NEED_TO_SWITCH" -def file_log_report( +def file_log_report( # TODO passed logger instance log, is_aggressive: bool, broker_order_with_controls: orderWithControls ): limit_trade = broker_order_with_controls.order.order_type == limit_order_type diff --git a/sysexecution/algos/allocate_algo_to_order.py b/sysexecution/algos/allocate_algo_to_order.py index 17ed31fbae..cd628fa88f 100644 --- a/sysexecution/algos/allocate_algo_to_order.py +++ b/sysexecution/algos/allocate_algo_to_order.py @@ -82,7 +82,7 @@ def check_and_if_required_allocate_algo_to_single_contract_order( ) -> contractOrder: config = get_algo_allocation_config(data) - log = contract_order.log_with_attributes(data.log) + log_attrs = {**contract_order.log_attributes(), "method": "temp"} if already_has_algo_allocated(contract_order): # Already done @@ -100,7 +100,10 @@ def check_and_if_required_allocate_algo_to_single_contract_order( contract_order=contract_order, config=config ) elif instrument_order_type == market_order_type: - log.debug("Market order type, so allocating to algo_market") + data.log.debug( + "Market order type, so allocating to algo_market", + **log_attrs, + ) contract_order = allocate_market_algo( contract_order=contract_order, config=config ) @@ -119,12 +122,16 @@ def check_and_if_required_allocate_algo_to_single_contract_order( ) elif instrument_order_type == balance_order_type: - log.critical("Balance orders aren't executed, shouldn't even be here!") + data.log.critical( + "Balance orders aren't executed, shouldn't even be here!", + **log_attrs, + ) return missing_order else: - log.warning( + data.log.warning( "Don't recognise order type %s so allocating to default %s" - % (instrument_order_type, config.default_algo) + % (instrument_order_type, config.default_algo), + **log_attrs, ) contract_order = allocate_default_algo( contract_order=contract_order, config=config @@ -170,9 +177,11 @@ def allocate_for_best_execution_no_limit( data: dataBlob, config: AlgoConfig, contract_order: contractOrder ) -> contractOrder: # in the future could be randomized... - log = contract_order.log_with_attributes(data.log) - - log.debug("'Best' order so allocating to original_best") + data.log.debug( + "'Best' order so allocating to original_best", + **contract_order.log_attributes(), + method="temp", + ) contract_order.algo_to_use = config.best_algo return contract_order @@ -182,8 +191,11 @@ def allocate_for_limit_order( data: dataBlob, config: AlgoConfig, contract_order: contractOrder ) -> contractOrder: # in the future could be randomized... - log = contract_order.log_with_attributes(data.log) - log.debug("Allocating to limit order") + data.log.debug( + "Allocating to limit order", + **contract_order.log_attributes(), + method="temp", + ) contract_order.algo_to_use = config.limit_order_algo return contract_order diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index 513a1987af..5f785516a8 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -34,7 +34,7 @@ def cancel_order( data: dataBlob, broker_order_with_controls: orderWithControls ) -> orderWithControls: - log = broker_order_with_controls.order.log_with_attributes(data.log) + log_attrs = {**broker_order_with_controls.order.log_attributes(), "method": "temp"} data_broker = dataBroker(data) data_broker.cancel_order_given_control_object(broker_order_with_controls) @@ -47,10 +47,13 @@ def cancel_order( broker_order_with_controls ) if is_cancelled: - log.debug("Cancelled order") + data.log.debug("Cancelled order", **log_attrs) break if timer.finished: - log.warning("Ran out of time to cancel order - may cause weird behaviour!") + data.log.warning( + "Ran out of time to cancel order - may cause weird behaviour!", + **log_attrs, + ) break return broker_order_with_controls @@ -62,7 +65,7 @@ def set_limit_price( new_limit_price: float, ): - log = broker_order_with_controls.order.log_with_attributes(data.log) + log_attrs = {**broker_order_with_controls.order.log_attributes(), "method": "temp"} data_broker = dataBroker(data) try: @@ -71,9 +74,15 @@ def set_limit_price( broker_order_with_controls, new_limit_price ) ) - log.debug("Tried to change limit price to %f" % new_limit_price) + data.log.debug( + "Tried to change limit price to %f" % new_limit_price, + **log_attrs, + ) except orderCannotBeModified as error: - log.debug("Can't modify limit price for order, error %s" % str(error)) + data.log.debug( + "Can't modify limit price for order, error %s" % str(error), + **log_attrs, + ) return broker_order_with_controls @@ -105,6 +114,7 @@ def check_current_limit_price_at_inside_spread( return new_limit_price +# TODO passed logger instance def file_log_report_market_order(log, broker_order_with_controls: orderWithControls): ticker_object = broker_order_with_controls.ticker diff --git a/sysexecution/order_stacks/contract_order_stack.py b/sysexecution/order_stacks/contract_order_stack.py index 8573c6505d..c83dbdfb0f 100644 --- a/sysexecution/order_stacks/contract_order_stack.py +++ b/sysexecution/order_stacks/contract_order_stack.py @@ -33,13 +33,16 @@ def add_controlling_algo_ref(self, order_id: int, control_algo_ref: str): modified_order.add_controlling_algo_ref(control_algo_ref) self._change_order_on_stack(order_id, modified_order) except Exception as e: - log = existing_order.log_with_attributes(self.log) error_msg = "%s couldn't add controlling algo %s to order %d" % ( str(e), control_algo_ref, order_id, ) - log.warning(error_msg) + self.log.warning( + error_msg, + **existing_order.log_attributes(), + method="temp", + ) raise Exception(error_msg) def release_order_from_algo_control(self, order_id: int): @@ -60,12 +63,15 @@ def release_order_from_algo_control(self, order_id: int): modified_order.release_order_from_algo_control() self._change_order_on_stack(order_id, modified_order) except Exception as e: - log = existing_order.log_with_attributes(self.log) error_msg = "%s couldn't remove controlling algo from order %d" % ( str(e), order_id, ) - log.warning(error_msg) + self.log.warning( + error_msg, + **existing_order.log_attributes(), + method="temp", + ) raise Exception(error_msg) def get_order_with_id_from_stack(self, order_id: int) -> contractOrder: diff --git a/sysexecution/order_stacks/instrument_order_stack.py b/sysexecution/order_stacks/instrument_order_stack.py index 7cb007c54d..3242f6eca8 100644 --- a/sysexecution/order_stacks/instrument_order_stack.py +++ b/sysexecution/order_stacks/instrument_order_stack.py @@ -94,14 +94,16 @@ def _put_new_order_on_stack_when_no_existing_order( ) -> int: # no current order for this instrument/strategy - log = new_order.log_with_attributes(self.log) + log_attrs = {**new_order.log_attributes(), "method": "temp"} if new_order.is_zero_trade() and not allow_zero_orders: log_msg = "Zero orders not allowed" - log.debug(log_msg) + self.log.debug(log_msg, **log_attrs) raise zeroOrderException(log_msg) - log.debug("New order %s putting on %s" % (str(new_order), str(self))) + self.log.debug( + "New order %s putting on %s" % (str(new_order), str(self)), **log_attrs + ) order_id = self._put_order_on_stack_and_get_order_id(new_order) @@ -119,6 +121,7 @@ def _put_adjusting_order_on_stack( :param new_order: :return: """ + # TODO log_with_attributes log = new_order.log_with_attributes(self.log) existing_orders = listOfOrders( @@ -145,7 +148,7 @@ def _put_adjusting_order_on_stack( return order_id -def calculate_adjusted_order_given_existing_orders( +def calculate_adjusted_order_given_existing_orders( # TODO passed logger instance new_order: instrumentOrder, existing_orders: listOfOrders, log ): diff --git a/sysexecution/order_stacks/order_stack.py b/sysexecution/order_stacks/order_stack.py index b53345dab7..b8231dd24e 100644 --- a/sysexecution/order_stacks/order_stack.py +++ b/sysexecution/order_stacks/order_stack.py @@ -88,14 +88,15 @@ def put_list_of_orders_on_stack( list_of_order_ids = [] for order in list_of_orders: - log = order.log_with_attributes(self.log) order.lock_order() try: order_id = self.put_order_on_stack(order) except Exception as e: - log.warning( + self.log.warning( "Failed to put order %s on stack error %s, rolling back entire transaction" - % (str(order), str(e)) + % (str(order), str(e)), + **order.log_attributes(), + method="temp", ) # rollback any orders we did manage to add @@ -255,15 +256,17 @@ def add_children_to_order_without_existing_children( self.log.warning(error_msg) raise missingOrder(error_msg) - log = existing_order.log_with_attributes(self.log) - already_have_children = not existing_order.no_children() if already_have_children: error_msg = ( "Can't add children to order that already has children %s" % str(existing_order.children) ) - log.warning(error_msg) + self.log.warning( + error_msg, + **existing_order.log_attributes(), + method="temp", + ) raise Exception(error_msg) new_order = copy(existing_order) @@ -319,7 +322,7 @@ def change_fill_quantity_for_order( # nout to do here, fills are cumulative return None - log = existing_order.log_with_attributes(self.log) + log_attrs = {**existing_order.log_attributes(), "method": "temp"} new_order = copy(existing_order) try: @@ -327,14 +330,15 @@ def change_fill_quantity_for_order( fill_qty, filled_price=filled_price, fill_datetime=fill_datetime ) except overFilledOrder as e: - log.warning(str(e)) + self.log.warning(str(e), **log_attrs) raise overFilledOrder(e) self._change_order_on_stack(order_id, new_order) - log.debug( + self.log.debug( "Changed fill qty from %s to %s for order %s" - % (str(existing_order.fill), str(fill_qty), str(existing_order)) + % (str(existing_order.fill), str(fill_qty), str(existing_order)), + **log_attrs, ) def zero_out(self, order_id: int): @@ -346,11 +350,13 @@ def zero_out(self, order_id: int): self.log.warning(error_msg) raise missingOrder(error_msg) - log = existing_order.log_with_attributes(existing_order) - if not existing_order.active: # already inactive - log.warning("Can't zero out order which is already inactive") + self.log.warning( + "Can't zero out order which is already inactive", + **existing_order.log_attributes(), + method="temp", + ) return None new_order = copy(existing_order) @@ -368,8 +374,6 @@ def deactivate_order(self, order_id: int): self.log.warning(error_msg) raise missingOrder(error_msg) - log = existing_order.log_with_attributes(self.log) - if not existing_order.active: # already inactive return None @@ -424,20 +428,20 @@ def _change_order_on_stack( self.log.warning(error_msg) raise missingOrder(error_msg) - log = existing_order.log_with_attributes(self.log) + log_attrs = {**existing_order.log_attributes(), "method": "temp"} lock_status = existing_order.is_order_locked() if lock_status is True: # already locked can't change error_msg = "Can't change locked order %s" % str(existing_order) - log.warning(error_msg) + self.log.warning(error_msg, **log_attrs) raise Exception(error_msg) if check_if_inactive: existing_order_is_inactive = not existing_order.active if existing_order_is_inactive: error_msg = "Can't change order %s as inactive" % str(existing_order) - log.warning(error_msg) + self.log.warning(error_msg, **log_attrs) self._change_order_on_stack_no_checking(order_id, new_order) @@ -466,10 +470,11 @@ def lock_order_on_stack(self, order_id: int): def _put_order_on_stack_and_get_order_id(self, order: Order) -> int: order_has_existing_id = not order.order_id is no_order_id if order_has_existing_id: - log = order.log_with_attributes(self.log) - log.warning( + self.log.warning( "Order %s already has order ID will be ignored and allocated a new ID!" - % str(order) + % str(order), + **order.log_attributes(), + method="temp", ) order_to_add = copy(order) diff --git a/sysexecution/orders/base_orders.py b/sysexecution/orders/base_orders.py index 415cbd2d46..a4cfed5d0d 100644 --- a/sysexecution/orders/base_orders.py +++ b/sysexecution/orders/base_orders.py @@ -453,6 +453,14 @@ def log_with_attributes(self, log): return log + def log_attributes(self): + """ + Returns a dict of order log attributes + :return: dict + """ + + return {} + def resolve_inputs_to_order(trade, fill) -> (tradeQuantity, tradeQuantity): resolved_trade = tradeQuantity(trade) diff --git a/sysexecution/orders/broker_orders.py b/sysexecution/orders/broker_orders.py index de0667e484..23cf282639 100644 --- a/sysexecution/orders/broker_orders.py +++ b/sysexecution/orders/broker_orders.py @@ -360,6 +360,24 @@ def log_with_attributes(self, log): return new_log + def log_attributes(self): + """ + Returns a dict of broker_order log attributes + + :return: dict + """ + broker_order = self + return { + STRATEGY_NAME_LOG_LABEL: broker_order.strategy_name, + INSTRUMENT_CODE_LOG_LABEL: broker_order.instrument_code, + CONTRACT_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( + broker_order.parent, no_parent + ), + BROKER_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( + broker_order.order_id, no_order_id + ), + } + def add_execution_details_from_matched_broker_order(self, matched_broker_order): fill_qty_okay = self.trade.fill_less_than_or_equal_to_desired_trade( matched_broker_order.fill diff --git a/sysexecution/orders/contract_orders.py b/sysexecution/orders/contract_orders.py index 83b37dd05c..b7ded12780 100644 --- a/sysexecution/orders/contract_orders.py +++ b/sysexecution/orders/contract_orders.py @@ -302,6 +302,23 @@ def log_with_attributes(self, log): return new_log + def log_attributes(self): + """ + Returns a dict of contract_order log attributes + + :return: dict + """ + return { + STRATEGY_NAME_LOG_LABEL: self.strategy_name, + INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, + CONTRACT_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( + self.order_id, no_order_id + ), + INSTRUMENT_ORDER_ID_LABEL: if_object_matches_return_empty_string( + self.parent, no_parent + ), + } + @dataclass class contractOrderKeyArguments: diff --git a/sysexecution/orders/instrument_orders.py b/sysexecution/orders/instrument_orders.py index 39e4fb16d1..11f35481f8 100644 --- a/sysexecution/orders/instrument_orders.py +++ b/sysexecution/orders/instrument_orders.py @@ -254,3 +254,17 @@ def log_with_attributes(self, log): ) return new_log + + def log_attributes(self): + """ + Returns a dict of instrument_order log attributes + + :return: dict + """ + return { + STRATEGY_NAME_LOG_LABEL: self.strategy_name, + INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, + INSTRUMENT_ORDER_ID_LABEL: if_object_matches_return_empty_string( + self.order_id, no_order_id + ), + } diff --git a/sysexecution/stack_handler/balance_trades.py b/sysexecution/stack_handler/balance_trades.py index 3022898dbd..6aa60f5a6c 100644 --- a/sysexecution/stack_handler/balance_trades.py +++ b/sysexecution/stack_handler/balance_trades.py @@ -14,14 +14,14 @@ class stackHandlerCreateBalanceTrades(stackHandlerForFills): def create_balance_trade(self, broker_order: brokerOrder): - log = broker_order.log_with_attributes(self.log) + log_attrs = {**broker_order.log_attributes(), "method": "temp"} contract_order = create_balance_contract_order_from_broker_order(broker_order) instrument_order = create_balance_instrument_order_from_contract_order( contract_order ) - log.debug("Putting balancing trades on stacks") + self.log.debug("Putting balancing trades on stacks", **log_attrs) try: self.put_balance_trades_on_stack( @@ -30,7 +30,7 @@ def create_balance_trade(self, broker_order: brokerOrder): except failureWithRollback: return None - log.debug("Updating positions") + self.log.debug("Updating positions", **log_attrs) self.apply_position_change_to_stored_contract_positions( contract_order, contract_order.fill, apply_entire_trade=True ) @@ -38,7 +38,10 @@ def create_balance_trade(self, broker_order: brokerOrder): instrument_order, instrument_order.fill, apply_entire_trade=True ) - log.debug("Marking balancing trades as completed and historic order data") + self.log.debug( + "Marking balancing trades as completed and historic order data", + **log_attrs, + ) self.handle_completed_instrument_order( instrument_order.order_id, treat_inactive_as_complete=True ) @@ -49,8 +52,8 @@ def put_balance_trades_on_stack( contract_order: contractOrder, broker_order: brokerOrder, ): - log = instrument_order.log_with_attributes(self.log) - log.debug("Putting balancing trades on stacks") + log_attrs = {**instrument_order.log_attributes(), "method": "temp"} + self.log.debug("Putting balancing trades on stacks", **log_attrs) try: instrument_order_id = ( @@ -59,20 +62,22 @@ def put_balance_trades_on_stack( ) ) except Exception as e: - log.error( - "Couldn't add balancing instrument trade error condition %s" % str(e) + self.log.error( + "Couldn't add balancing instrument trade error condition %s" % str(e), + **log_attrs, ) - log.error("Nothing to roll back") + self.log.error("Nothing to roll back", **log_attrs) raise failureWithRollback from e try: contract_order.parent = instrument_order_id contract_order_id = self.contract_stack.put_order_on_stack(contract_order) except Exception as e: - log.error( - "Couldn't add balancing contract trade error condition %s " % str(e) + self.log.error( + "Couldn't add balancing contract trade error condition %s " % str(e), + **log_attrs, ) - log.error("Rolling back") + self.log.error("Rolling back", **log_attrs) self.rollback_balance_trades( instrument_order_id, missing_order, missing_order ) @@ -84,8 +89,11 @@ def put_balance_trades_on_stack( ) except Exception as e: - log.error("Couldn't add children to instrument order error %s" % str(e)) - log.error("Rolling back") + self.log.error( + "Couldn't add children to instrument order error %s" % str(e), + **log_attrs, + ) + self.log.error("Rolling back", **log_attrs) self.rollback_balance_trades( instrument_order_id, contract_order_id, missing_order ) @@ -95,8 +103,11 @@ def put_balance_trades_on_stack( try: broker_order_id = self.broker_stack.put_order_on_stack(broker_order) except Exception as e: - log.error("Couldn't add balancing broker trade error condition %s" % str(e)) - log.error("Rolling back") + self.log.error( + "Couldn't add balancing broker trade error condition %s" % str(e), + **log_attrs, + ) + self.log.error("Rolling back", **log_attrs) self.rollback_balance_trades( instrument_order_id, contract_order_id, missing_order ) @@ -107,8 +118,11 @@ def put_balance_trades_on_stack( contract_order_id, [broker_order_id] ) except Exception as e: - log.error("Couldn't add children to contract order exception %s" % str(e)) - log.error("Rolling back") + self.log.error( + "Couldn't add children to contract order exception %s" % str(e), + **log_attrs, + ) + self.log.error("Rolling back", **log_attrs) self.rollback_balance_trades( instrument_order_id, contract_order_id, broker_order_id ) @@ -117,7 +131,7 @@ def put_balance_trades_on_stack( contract_order.order_id = contract_order_id instrument_order.order_id = instrument_order_id - log.debug("All balancing trades added to stacks") + self.log.debug("All balancing trades added to stacks", **log_attrs) def rollback_balance_trades( self, instrument_order_id: int, contract_order_id: int, broker_order_id: int @@ -131,8 +145,8 @@ def rollback_balance_trades( self.broker_stack.remove_order_with_id_from_stack(broker_order_id) def create_balance_instrument_trade(self, instrument_order: instrumentOrder): - log = instrument_order.log_with_attributes(self.log) - log.debug("Putting balancing order on instrument stack") + log_attrs = {**instrument_order.log_attributes(), "method": "temp"} + self.log.debug("Putting balancing order on instrument stack", **log_attrs) instrument_order_id = ( self.instrument_stack.put_manual_order_on_stack_and_return_order_id( instrument_order @@ -141,8 +155,9 @@ def create_balance_instrument_trade(self, instrument_order: instrumentOrder): instrument_order.order_id = instrument_order_id - log.debug( - "Marking balancing trades as completed and updating positions and historic order data" + self.log.debug( + "Marking balancing trades as completed and updating positions and historic order data", + **log_attrs, ) self.apply_position_change_to_instrument( instrument_order, instrument_order.fill, apply_entire_trade=True diff --git a/sysexecution/stack_handler/cancel_and_modify.py b/sysexecution/stack_handler/cancel_and_modify.py index 1615544620..6d622897d6 100644 --- a/sysexecution/stack_handler/cancel_and_modify.py +++ b/sysexecution/stack_handler/cancel_and_modify.py @@ -65,8 +65,11 @@ def cancel_broker_order_with_id_and_return_order( # no need to cancel return missing_order - log = broker_order.log_with_attributes(self.log) - log.debug("Cancelling order on stack with broker %s" % str(broker_order)) + self.log.debug( + "Cancelling order on stack with broker %s" % str(broker_order), + **broker_order.log_attributes(), + method="temp", + ) data_broker = self.data_broker data_broker.cancel_order_on_stack(broker_order) @@ -101,9 +104,12 @@ def list_of_orders_not_yet_cancelled( order_is_cancelled = True if order_is_cancelled: - log = broker_order.log_with_attributes(self.log) new_list_of_orders.remove(broker_order) - log.debug("Order %s succesfully cancelled" % broker_order) + self.log.debug( + "Order %s succesfully cancelled" % broker_order, + **broker_order.log_attributes(), + method="temp", + ) new_list_of_orders = listOfOrders(new_list_of_orders) @@ -118,8 +124,9 @@ def check_order_cancelled(self, broker_order: brokerOrder) -> bool: def critical_cancel_log(self, list_of_broker_orders: listOfOrders): for broker_order in list_of_broker_orders: - log = broker_order.log_with_attributes(self.log) - log.critical( - "Broker order %s could not be cancelled within time limit; might be a position break" - % broker_order + self.log.critical( + "Broker order %s could not be cancelled within time limit; might be a " + "position break" % broker_order, + **broker_order.log_attributes(), + method="temp", ) diff --git a/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py b/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py index 02b3fa39ef..731c681daa 100644 --- a/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py +++ b/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py @@ -159,7 +159,6 @@ def size_contract_order( def apply_trade_limits_to_contract_order( self, proposed_order: contractOrder ) -> contractOrder: - log = proposed_order.log_with_attributes(self.log) data_trade_limits = dataTradeLimits(self.data) instrument_strategy = proposed_order.instrument_strategy @@ -178,13 +177,15 @@ def apply_trade_limits_to_contract_order( ) if contract_order_after_trade_limits.trade != proposed_order.trade: - log.debug( + self.log.debug( "%s trade change from %s to %s because of trade limits" % ( proposed_order.key, str(proposed_order.trade), str(contract_order_after_trade_limits.trade), - ) + ), + **proposed_order.log_attributes(), + method="temp", ) return contract_order_after_trade_limits @@ -194,7 +195,6 @@ def liquidity_size_contract_order( ) -> contractOrder: data_broker = self.data_broker - log = contract_order_after_trade_limits.log_with_attributes(self.log) # check liquidity, and if necessary carve up order # Note for spread orders we check liquidity in the component markets @@ -205,9 +205,11 @@ def liquidity_size_contract_order( ) if liquid_qty != contract_order_after_trade_limits.trade: - log.debug( + self.log.debug( "Cut down order to size %s from %s because of liquidity" - % (str(liquid_qty), str(contract_order_after_trade_limits.trade)) + % (str(liquid_qty), str(contract_order_after_trade_limits.trade)), + **contract_order_after_trade_limits.log_attributes(), + method="temp", ) if liquid_qty.equals_zero(): @@ -223,7 +225,6 @@ def send_to_algo( self, contract_order_to_trade: contractOrder ) -> (Algo, orderWithControls): - log = contract_order_to_trade.log_with_attributes(self.log) instrument_order = self.get_parent_of_contract_order(contract_order_to_trade) contract_order_to_trade_with_algo_set = ( @@ -234,12 +235,14 @@ def send_to_algo( ) ) - log.debug( + self.log.debug( "Sending order %s to algo %s" % ( str(contract_order_to_trade_with_algo_set), contract_order_to_trade_with_algo_set.algo_to_use, - ) + ), + **contract_order_to_trade.log_attributes(), + method="temp", ) algo_class_to_call = self.add_controlling_algo_to_order( @@ -298,7 +301,6 @@ def add_trade_to_database( broker_order = broker_order_with_controls_and_order_id.order - log = broker_order.log_with_attributes(self.log) try: broker_order_id = self.broker_stack.put_order_on_stack(broker_order) except Exception as e: @@ -309,7 +311,7 @@ def add_trade_to_database( "Created a broker order %s but can't add it to the order stack!! (condition %s) STACK CORRUPTED" % (str(broker_order), str(e)) ) - log.critical(error_msg) + self.log.critical(error_msg, **broker_order.log_attributes(), method="temp") raise Exception(error_msg) # set order_id (wouldn't have had one before, might be done inside db adding but make explicit) diff --git a/sysexecution/stack_handler/fills.py b/sysexecution/stack_handler/fills.py index dccc0ff643..0d23245936 100644 --- a/sysexecution/stack_handler/fills.py +++ b/sysexecution/stack_handler/fills.py @@ -58,10 +58,11 @@ def apply_broker_fill_from_broker_to_broker_database(self, broker_order_id: int) ) if matched_broker_order is missing_order: - log = db_broker_order.log_with_attributes(self.log) - log.warning( + self.log.warning( "Order in database %s does not match any broker orders: can't fill" - % db_broker_order + % db_broker_order, + **db_broker_order.log_attributes(), + method="temp", ) return None @@ -214,10 +215,11 @@ def apply_contract_fill_to_instrument_order(self, contract_order_id: int): instrument_order_id = contract_order.parent if instrument_order_id is no_parent: - log = contract_order.log_with_attributes(self.log) - log.error( + self.log.error( "No parent for contract order %s %d" - % (str(contract_order), contract_order_id) + % (str(contract_order), contract_order_id), + **contract_order.log_attributes(), + method="temp", ) return None @@ -254,7 +256,6 @@ def apply_contract_fill_to_parent_order_single_child( contract_order = self.contract_stack.get_order_with_id_from_stack( contract_order_id ) - log = contract_order.log_with_attributes(self.log) fill_for_contract = contract_order.fill filled_price = contract_order.filled_price @@ -274,9 +275,11 @@ def apply_contract_fill_to_parent_order_single_child( pass else: # A spread order that isn't flat - log.critical( + self.log.critical( "Can't handle non-flat intra-market spread orders! Instrument order %s %s" - % (str(instrument_order), str(instrument_order.order_id)) + % (str(instrument_order), str(instrument_order.order_id)), + **contract_order.log_attributes(), + method="temp", ) def apply_contract_fill_to_parent_order_multiple_children( @@ -287,8 +290,6 @@ def apply_contract_fill_to_parent_order_multiple_children( # - Leg by leg flat spread eg forced roll order: do nothing since doesn't change instrument positions # Distributed roll order eg if we are short -2 front, want to buy 3, will do +2 front +1 next - log = instrument_order.log_with_attributes(self.log) - distributed_order = self.check_to_see_if_distributed_instrument_order( list_of_contract_order_ids, instrument_order ) @@ -309,9 +310,11 @@ def apply_contract_fill_to_parent_order_multiple_children( else: # A proper spread trade across markets can't do this - log.critical( + self.log.critical( "Can't handle inter-market spread orders! Instrument order %s %s" - % (str(instrument_order), str(instrument_order.order_id)) + % (str(instrument_order), str(instrument_order.order_id)), + **instrument_order.log_attributes(), + method="temp", ) def check_to_see_if_distributed_instrument_order( diff --git a/sysexecution/stack_handler/roll_orders.py b/sysexecution/stack_handler/roll_orders.py index 2289445c54..ad0825803d 100644 --- a/sysexecution/stack_handler/roll_orders.py +++ b/sysexecution/stack_handler/roll_orders.py @@ -205,6 +205,7 @@ def add_instrument_and_list_of_contract_orders_to_stack( instrument_stack = self.instrument_stack contract_stack = self.contract_stack + # TODO log_with_attributes parent_log = instrument_order.log_with_attributes(self.log) # Do as a transaction: if everything doesn't go to plan can roll back @@ -502,9 +503,12 @@ def create_contract_roll_orders( contract_orders = create_contract_orders_outright(roll_spread_info) else: - log = instrument_order.log_with_attributes(data.log) roll_state = diag_positions.get_roll_state(instrument_code) - log.warning("Roll state %s is unexpected, might have changed" % str(roll_state)) + data.log.warning( + "Roll state %s is unexpected, might have changed" % str(roll_state), + **instrument_order.log_attributes(), + method="temp", + ) return missing_order contract_orders = allocate_algo_to_list_of_contract_orders( diff --git a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py index 54db138d70..346c5c8e44 100644 --- a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py +++ b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py @@ -64,8 +64,11 @@ def spawn_children_from_instrument_order_id(self, instrument_order_id: int): self.data, instrument_order ) - log = instrument_order.log_with_attributes(self.log) - log.debug("List of contract orders spawned %s" % str(list_of_contract_orders)) + self.log.debug( + "List of contract orders spawned %s" % str(list_of_contract_orders), + **instrument_order.log_attributes(), + method="temp", + ) self.add_children_to_stack_and_child_id_to_parent( self.instrument_stack, @@ -82,6 +85,7 @@ def add_children_to_stack_and_child_id_to_parent( list_of_child_orders: listOfOrders, ): + # TODO log_with_attributes parent_log = parent_order.log_with_attributes(self.log) list_of_child_ids = put_children_on_stack( @@ -232,6 +236,7 @@ def get_required_contract_trade_for_instrument( :return: tuple: list of child orders: each is a tuple: contract str or missing_contract, trade int """ instrument_code = instrument_order.instrument_code + # TODO log_with_attributes log = instrument_order.log_with_attributes(data.log) trade = instrument_order.as_single_trade_qty_or_error() @@ -294,7 +299,7 @@ def get_required_contract_trade_for_instrument( return [] -def child_order_in_priced_contract_only( +def child_order_in_priced_contract_only( # TODO passed logger instance data: dataBlob, instrument_order: instrumentOrder, log ): diag_contracts = dataContracts(data) @@ -313,7 +318,7 @@ def passive_roll_child_order( instrument_order: instrumentOrder, ) -> list: - log = instrument_order.log_with_attributes(data.log) + log_attrs = {**instrument_order.log_attributes(), "method": "temp"} diag_positions = diagPositions(data) instrument_code = instrument_order.instrument_code trade = instrument_order.trade @@ -330,9 +335,10 @@ def passive_roll_child_order( if position_current_contract == 0: # Passive roll and no position in the current contract, start trading # the next contract - log.debug( + data.log.debug( "Passive roll handling order %s, no position in current contract, entire trade in next contract %s" - % (str(instrument_order), next_contract) + % (str(instrument_order), next_contract), + **log_attrs, ) return [contractIdAndTrade(next_contract, trade)] @@ -343,9 +349,10 @@ def passive_roll_child_order( if increasing_trade: # Passive roll and increasing trade # Do it all in next contract - log.debug( + data.log.debug( "Passive roll handling order %s, increasing trade, entire trade in next contract %s" - % (str(instrument_order), next_contract) + % (str(instrument_order), next_contract), + **log_attrs, ) return [contractIdAndTrade(next_contract, trade)] @@ -356,18 +363,20 @@ def passive_roll_child_order( ) if new_position == 0 or sign_of_position_is_unchanged: # A reducing trade that we can do entirely in the current contract - log.debug( + data.log.debug( "Passive roll handling order %s, reducing trade, entire trade in next contract %s" - % (str(instrument_order), next_contract) + % (str(instrument_order), next_contract), + **log_attrs, ) return [contractIdAndTrade(current_contract, trade)] # OKAY to recap: it's a passive roll, but the trade will be split between # current and next - log.debug( + data.log.debug( "Passive roll handling order %s, reducing trade, split trade between contract %s and %s" - % (str(instrument_order), current_contract, next_contract) + % (str(instrument_order), current_contract, next_contract), + **log_attrs, ) return passive_trade_split_over_two_contracts( @@ -550,15 +559,16 @@ def add_reference_price_to_a_direct_child_order( data, child_order, contract_to_match, price_to_adjust ) except missingData: - log = instrument_order.log_with_attributes(data.log) - log.warning( + data.log.warning( "Couldn't adjust reference price for order %s child %s going from %s to %s, can't do TCA" % ( str(instrument_order), str(child_order), contract_to_match, child_order.contract_date, - ) + ), + **instrument_order.log_attributes(), + method="temp", ) return child_order @@ -624,10 +634,11 @@ def calculate_limit_prices_for_direct_child_orders( child_order is missing_order for child_order in list_of_contract_orders ] if any(flag_missing_orders): - log = instrument_order.log_with_attributes(data.log) - log.critical( + data.log.critical( "Couldn't adjust limit price for at least one child order %s: can't execute any child orders" - % str(instrument_order) + % str(instrument_order), + **instrument_order.log_attributes(), + method="temp", ) return listOfOrders([]) @@ -661,15 +672,16 @@ def add_limit_price_to_a_direct_child_order( except missingData: # This is a serious problem # We can't possibly execute any part of the parent order - log = instrument_order.log_with_attributes(data.log) - log.critical( + data.log.critical( "Couldn't adjust limit price for order %s child %s going from %s to %s" % ( str(instrument_order), str(child_order), contract_to_match, child_order.contract_date, - ) + ), + **instrument_order.log_attributes(), + method="temp", ) return missing_order diff --git a/sysexecution/stack_handler/stackHandlerCore.py b/sysexecution/stack_handler/stackHandlerCore.py index 7f7c6b83ec..f145c988b2 100644 --- a/sysexecution/stack_handler/stackHandlerCore.py +++ b/sysexecution/stack_handler/stackHandlerCore.py @@ -95,7 +95,7 @@ def update_prices(self) -> updatePrices: return update_prices -def put_children_on_stack( +def put_children_on_stack( # TODO passed logger instance child_stack: orderStackData, parent_order: Order, list_of_child_orders: listOfOrders, @@ -123,7 +123,7 @@ def put_children_on_stack( return list_of_child_ids -def add_children_to_parent_or_rollback_children( +def add_children_to_parent_or_rollback_children( # TODO passed logger instance parent_order: Order, list_of_child_ids: list, parent_stack: orderStackData, @@ -152,7 +152,7 @@ def add_children_to_parent_or_rollback_children( return success -def log_successful_adding( +def log_successful_adding( # TODO passed logger instance list_of_child_orders: listOfOrders, list_of_child_ids: list, parent_order: Order, @@ -160,6 +160,7 @@ def log_successful_adding( ): for child_order, child_id in zip(list_of_child_orders, list_of_child_ids): + # TODO log_with_attributes child_log = child_order.log_with_attributes(parent_log) child_log.debug( "Put child order %s on stack with ID %d from parent order %s" diff --git a/sysexecution/strategies/classic_buffered_positions.py b/sysexecution/strategies/classic_buffered_positions.py index 9bd0ff6613..e563b464c1 100644 --- a/sysexecution/strategies/classic_buffered_positions.py +++ b/sysexecution/strategies/classic_buffered_positions.py @@ -179,8 +179,7 @@ def trade_given_optimal_and_actual_positions( reference_datetime=ref_date, ) - log = order_required.log_with_attributes(data.log) - log.debug( + data.log.debug( "Upper %.2f Lower %.2f Current %d Required position %d Required trade %d Reference price %f for contract %s" % ( upper_for_instrument, @@ -190,7 +189,9 @@ def trade_given_optimal_and_actual_positions( trade_required, reference_price, reference_contract, - ) + ), + **order_required.log_attributes(), + method="temp", ) return order_required diff --git a/sysexecution/strategies/dynamic_optimised_positions.py b/sysexecution/strategies/dynamic_optimised_positions.py index d70bae364e..b81b333ec4 100644 --- a/sysexecution/strategies/dynamic_optimised_positions.py +++ b/sysexecution/strategies/dynamic_optimised_positions.py @@ -741,8 +741,7 @@ def trade_given_optimal_and_actual_positions( reference_datetime=reference_date, ) - log = order_required.log_with_attributes(data.log) - log.debug( + data.log.debug( "Current %d Required position %d Required trade %d Reference price %f for contract %s" % ( current_position, @@ -750,7 +749,9 @@ def trade_given_optimal_and_actual_positions( trade_required, reference_price, reference_contract, - ) + ), + **order_required.log_attributes(), + method="temp", ) return order_required diff --git a/sysexecution/strategies/strategy_order_handling.py b/sysexecution/strategies/strategy_order_handling.py index 36ea397274..b3746cc98c 100644 --- a/sysexecution/strategies/strategy_order_handling.py +++ b/sysexecution/strategies/strategy_order_handling.py @@ -132,15 +132,16 @@ def apply_overrides_for_instrument_and_strategy( revised_order = override.apply_override(original_position, proposed_order) if revised_order.trade != proposed_order.trade: - log = proposed_order.log_with_attributes(self.log) - log.debug( + self.log.debug( "%s trade change from %s to %s because of override %s" % ( instrument_strategy.key, str(revised_order.trade), str(proposed_order.trade), str(override), - ) + ), + **proposed_order.log_attributes(), + method="temp", ) return revised_order @@ -149,7 +150,7 @@ def adjust_order_for_position_limits( self, order: instrumentOrder ) -> instrumentOrder: - log = order.log_with_attributes(self.log) + log_attrs = {**order.log_attributes(), "method": "temp"} data_position_limits = dataPositionLimits(self.data) new_order = data_position_limits.apply_position_limit_to_order(order) @@ -157,13 +158,15 @@ def adjust_order_for_position_limits( if new_order.trade != order.trade: if new_order.is_zero_trade(): ## at position limit, can't do anything - log.warning( - "Can't trade at all because of position limits %s" % str(order) + self.log.warning( + "Can't trade at all because of position limits %s" % str(order), + **log_attrs, ) else: - log.warning( + self.log.warning( "Can't do trade of %s because of position limits,instead will do %s" - % (str(order), str(new_order.trade)) + % (str(order), str(new_order.trade)), + **log_attrs, ) return new_order @@ -173,30 +176,32 @@ def submit_order_list(self, order_list: listOfOrders): for order in order_list: # try: # we allow existing orders to be modified - log = order.log_with_attributes(self.log) - log.debug("Required order %s" % str(order)) + log_attrs = {**order.log_attributes(), "method": "temp"} + self.log.debug("Required order %s" % str(order), **log_attrs) instrument_locked = data_lock.is_instrument_locked(order.instrument_code) if instrument_locked: - log.debug("Instrument locked, not submitting") + self.log.debug("Instrument locked, not submitting", **log_attrs) continue self.submit_order(order) def submit_order(self, order: instrumentOrder): - log = order.log_with_attributes(self.log) + log_attrs = {**order.log_attributes(), "method": "temp"} try: order_id = self.order_stack.put_order_on_stack(order) except zeroOrderException: # we checked for zero already, which means that there is an existing order on the stack # An existing order of the same size - log.warning( - "Ignoring new order as either zero size or it replicates an existing order on the stack" + self.log.warning( + "Ignoring new order as either zero size or it replicates an existing order on the stack", + **log_attrs, ) else: - log.debug( + self.log.debug( "Added order %s to instrument order stack with order id %d" % (str(order), order_id), instrument_order_id=order_id, + **log_attrs, ) diff --git a/sysinit/futures/seed_price_data_from_IB.py b/sysinit/futures/seed_price_data_from_IB.py index 40716e2bb8..f15eaa6737 100644 --- a/sysinit/futures/seed_price_data_from_IB.py +++ b/sysinit/futures/seed_price_data_from_IB.py @@ -32,16 +32,16 @@ def seed_price_data_from_IB(instrument_code): def seed_price_data_for_contract(data: dataBlob, contract_object: futuresContract): - log = contract_object.specific_log(data.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} list_of_frequencies = [HOURLY_FREQ, DAILY_PRICE_FREQ] for frequency in list_of_frequencies: - log.debug("Getting data at frequency %s" % str(frequency)) + data.log.debug("Getting data at frequency %s" % str(frequency), **log_attrs) seed_price_data_for_contract_at_frequency( data=data, contract_object=contract_object, frequency=frequency ) - log.debug("Writing merged data for %s" % str(contract_object)) + data.log.debug("Writing merged data for %s" % str(contract_object), **log_attrs) write_merged_prices_for_contract( data, contract_object=contract_object, list_of_frequencies=list_of_frequencies ) @@ -53,7 +53,7 @@ def seed_price_data_for_contract_at_frequency( data_broker = dataBroker(data) update_prices = updatePrices(data) - log = contract_object.specific_log(data.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} try: prices = ( @@ -62,13 +62,22 @@ def seed_price_data_for_contract_at_frequency( ) ) except missingData: - log.warning("Error getting data for %s" % str(contract_object)) + data.log.warning( + "Error getting data for %s" % str(contract_object), + **log_attrs, + ) return None - log.debug("Got %d lines of prices for %s" % (len(prices), str(contract_object))) + data.log.debug( + "Got %d lines of prices for %s" % (len(prices), str(contract_object)), + **log_attrs, + ) if len(prices) == 0: - log.warning("No price data for %s" % str(contract_object)) + data.log.warning( + "No price data for %s" % str(contract_object), + **log_attrs, + ) else: update_prices.overwrite_prices_at_frequency_for_contract( contract_object=contract_object, frequency=frequency, new_prices=prices diff --git a/sysobjects/contracts.py b/sysobjects/contracts.py index 3c18bd5873..25b99dd6f9 100644 --- a/sysobjects/contracts.py +++ b/sysobjects/contracts.py @@ -90,7 +90,7 @@ def __init__( self._contract_date = contract_date_object self._params = parameter_object - def specific_log(self, log): + def specific_log(self, log): # TODO remove new_log = log.setup( **{ INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, @@ -100,6 +100,17 @@ def specific_log(self, log): return new_log + def log_attributes(self): + """ + Returns a dict of futuresContract log attributes + + :return: dict + """ + return { + INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, + CONTRACT_DATE_LOG_LABEL: self.date_str, + } + @property def instrument(self): return self._instrument @@ -146,11 +157,6 @@ def sampling_on(self): def sampling_off(self): self.params.sampling = False - def log(self, log: pst_logger): - return log.setup( - instrument_code=self.instrument_code, contract_date=self.date_str - ) - def as_dict(self): """ Turn into a dict. We only include instrument_code from the instrument_object, the rest would be found elsewhere diff --git a/sysproduction/data/positions.py b/sysproduction/data/positions.py index 8e7e47a4eb..671eae55f2 100644 --- a/sysproduction/data/positions.py +++ b/sysproduction/data/positions.py @@ -306,10 +306,11 @@ def update_expiry_for_single_contract( original_contract.instrument_code, original_contract.contract_date ) except ContractNotFound: - log = original_contract.specific_log(self.data.log) - log.warning( + self.data.log.warning( "Contract %s is missing from database - expiry not found and will mismatch" - % str(original_contract) + % str(original_contract), + **original_contract.log_attributes(), + method="temp", ) new_contract = copy(original_contract) else: @@ -465,8 +466,7 @@ def update_strategy_position_table_with_instrument_order( instrument_strategy, new_position_as_int ) - log = original_instrument_order.log_with_attributes(self.log) - log.debug( + self.log.debug( "Updated position of %s from %d to %d because of trade %s %d fill %s" % ( str(instrument_strategy), @@ -475,7 +475,9 @@ def update_strategy_position_table_with_instrument_order( str(original_instrument_order), original_instrument_order.order_id, str(new_fill), - ) + ), + **original_instrument_order.log_attributes(), + method="temp", ) return success @@ -496,20 +498,20 @@ def update_contract_position_table_with_contract_order( time_date = datetime.datetime.now() - log = contract_order_before_fills.log_with_attributes(self.log) - for contract, trade_done in zip(list_of_individual_contracts, fill_list): self._update_positions_for_individual_contract_leg( contract=contract, trade_done=trade_done, time_date=time_date ) - log.debug( + self.log.debug( "Updated position of %s because of trade %s ID:%d with fills %d" % ( str(contract), str(contract_order_before_fills), contract_order_before_fills.order_id, trade_done, - ) + ), + **contract_order_before_fills.log_attributes(), + method="temp", ) def _update_positions_for_individual_contract_leg( @@ -526,15 +528,16 @@ def _update_positions_for_individual_contract_leg( # check new_position_db = self.diag_positions.get_position_for_contract(contract) - log = contract.specific_log(self.log) - log.debug( + self.log.debug( "Updated position of %s from %d to %d; new position in db is %d" % ( str(contract), current_position, new_position, new_position_db, - ) + ), + **contract.log_attributes(), + method="temp", ) diff --git a/sysproduction/interactive_update_roll_status.py b/sysproduction/interactive_update_roll_status.py index cbe881160b..56280c8c7a 100644 --- a/sysproduction/interactive_update_roll_status.py +++ b/sysproduction/interactive_update_roll_status.py @@ -572,7 +572,6 @@ def manually_update_roll_state_for_code( # First get the roll info # This will also update to console - data.log.setup(instrument_code=instrument_code) roll_state_suggested = suggest_roll_state_for_instrument( roll_data=roll_data, auto_parameters=auto_parameters ) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index ef74b5a62d..2347cbc832 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -320,6 +320,7 @@ def update_historical_prices_for_instrument( return failure for contract_object in contract_list: + # TODO specific_log data.update_log(contract_object.specific_log(data.log)) update_historical_prices_for_instrument_and_contract( contract_object, diff --git a/sysproduction/update_sampled_contracts.py b/sysproduction/update_sampled_contracts.py index 2aeec70cf7..e60631eb40 100644 --- a/sysproduction/update_sampled_contracts.py +++ b/sysproduction/update_sampled_contracts.py @@ -275,9 +275,12 @@ def mark_existing_contract_as_sampling( ): data_contracts = dataContracts(data) data_contracts.mark_contract_as_sampling(contract_to_add) - log = contract_to_add.specific_log(data.log) - log.debug("Contract %s now sampling" % str(contract_to_add)) + data.log.debug( + "Contract %s now sampling" % str(contract_to_add), + **contract_to_add.log_attributes(), + method="temp", + ) def add_new_contract_with_sampling_on(contract_to_add: futuresContract, data: dataBlob): @@ -290,9 +293,11 @@ def add_new_contract_with_sampling_on(contract_to_add: futuresContract, data: da # Should not be any duplication to ignore data_contracts.add_contract_data(contract_to_add, ignore_duplication=False) - log = contract_to_add.specific_log(data.log) - - log.debug("Contract %s now added to database and sampling" % str(contract_to_add)) + data.log.debug( + "Contract %s now added to database and sampling" % str(contract_to_add), + **contract_to_add.log_attributes(), + method="temp", + ) def update_expiries_and_sampling_status_for_contracts( @@ -334,7 +339,7 @@ def update_expiry_and_sampling_status_for_contract( OK_TO_SAMPLE = "okay to sample" unsample_reason = OK_TO_SAMPLE - log = contract_object.specific_log(data.log) + log_attrs = {**contract_object.log_attributes(), "method": "temp"} data_contracts = dataContracts(data) db_contract = data_contracts.get_contract_from_db(contract_object) @@ -343,18 +348,20 @@ def update_expiry_and_sampling_status_for_contract( try: broker_expiry_date = get_contract_expiry_from_broker(contract_object, data=data) except missingContract: - log.debug( + data.log.debug( "Can't find expiry for %s, could be a connection problem but could be because contract has already expired" - % (str(contract_object)) + % (str(contract_object)), + **log_attrs, ) ## As probably expired we'll remove it from the sampling list unsample_reason = "Contract not available from IB" else: if broker_expiry_date == db_expiry_date: - log.debug( + data.log.debug( "No change to contract expiry %s to %s" - % (str(contract_object), str(broker_expiry_date)) + % (str(contract_object), str(broker_expiry_date)), + **log_attrs, ) else: # Different! @@ -376,10 +383,11 @@ def update_expiry_and_sampling_status_for_contract( if turn_off_sampling: # Mark it as stop sampling in the database data_contracts.mark_contract_as_not_sampling(contract_object) - log.debug( + data.log.debug( "Contract %s %s so now stopped sampling" % (str(contract_object), unsample_reason), contract_date=contract_object.date_str, + **log_attrs, ) @@ -402,11 +410,11 @@ def update_contract_object_with_new_expiry_date( contract_object, new_expiry_date=broker_expiry_date ) - log = contract_object.specific_log(data.log) - - log.debug( + data.log.debug( "Updated expiry of contract %s to %s" - % (str(contract_object), str(broker_expiry_date)) + % (str(contract_object), str(broker_expiry_date)), + **contract_object.log_attributes(), + method="temp", ) diff --git a/systems/stage.py b/systems/stage.py index 59e56ff75f..2161624a99 100644 --- a/systems/stage.py +++ b/systems/stage.py @@ -38,6 +38,7 @@ def system_init(self, system: System): self._parent = system # and a log + # TODO log.setup log = system.log.setup(stage=self.name) self._log = log From 2e5b6ceaafe066e485feb6b65109bab51cf25090 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 10 Oct 2023 16:40:27 +0100 Subject: [PATCH 020/235] removing usages of log.setup() which have no effect --- syscontrol/run_process.py | 2 -- syscontrol/timer_functions.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/syscontrol/run_process.py b/syscontrol/run_process.py index b8008d0236..d06abbbee7 100644 --- a/syscontrol/run_process.py +++ b/syscontrol/run_process.py @@ -70,8 +70,6 @@ def list_of_timer_functions(self) -> listOfTimerFunctions: return self._list_of_timer_functions def _setup(self): - # TODO log.setup - self.data.log.setup(type=self.process_name) self._log = self.data.log data_control = dataControlProcess(self.data) self._data_control = data_control diff --git a/syscontrol/timer_functions.py b/syscontrol/timer_functions.py index 41a889207b..551ce3e8b4 100644 --- a/syscontrol/timer_functions.py +++ b/syscontrol/timer_functions.py @@ -27,8 +27,6 @@ def __init__( self._data = data self._parameters = parameters - # TODO log.setup - log.setup(type=self.process_name) self._log = log self._report_status = reportStatus(log) From 7708841bc2ae8a3d1af3bc8f929119179dd4dada Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 10 Oct 2023 22:40:27 +0100 Subject: [PATCH 021/235] removing unnecessary use of log.setup(), logger is set up in __init__() --- sysdata/config/configdata.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/sysdata/config/configdata.py b/sysdata/config/configdata.py index 934d0720ca..48cefb0a59 100644 --- a/sysdata/config/configdata.py +++ b/sysdata/config/configdata.py @@ -189,10 +189,6 @@ def system_init(self, base_system): :return: nothing """ - # inherit the log - # TODO log.setup - setattr(self, "log", base_system.log.setup(stage="config")) - # fill with defaults self.fill_with_defaults() From f851a7ad37bfd67af3a4ac14407a1235a0e74625 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 10 Oct 2023 22:49:55 +0100 Subject: [PATCH 022/235] removing unnecessary use of log.setup(), system_init() adds 'stage' attribute to 'base_system' logger --- systems/stage.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/systems/stage.py b/systems/stage.py index 2161624a99..1cbd219186 100644 --- a/systems/stage.py +++ b/systems/stage.py @@ -38,14 +38,11 @@ def system_init(self, system: System): self._parent = system # and a log - # TODO log.setup - log = system.log.setup(stage=self.name) - self._log = log + self._log = get_logger("base_system", {STAGE_LOG_LABEL: self.name}) @property - def log(self) -> pst_logger: - log = getattr(self, "_log", get_logger("")) - return log + def log(self): + return self._log @property def parent(self) -> System: From 17d0d53a5a242212f0868629bb9914e5aec2914c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 10 Oct 2023 22:52:32 +0100 Subject: [PATCH 023/235] removing unnecessary use of log.setup(), system_init() adds 'stage' attribute to 'base_system' logger --- sysdata/sim/sim_data.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sysdata/sim/sim_data.py b/sysdata/sim/sim_data.py index dc9d7ae4ea..c5b6eb0a1a 100644 --- a/sysdata/sim/sim_data.py +++ b/sysdata/sim/sim_data.py @@ -9,7 +9,7 @@ resample_prices_to_business_day_index, ) from sysdata.base_data import baseData - +from syslogging.logger import * from sysobjects.spot_fx_prices import fxPrices from sysobjects.instruments import instrumentCosts @@ -71,8 +71,7 @@ def system_init(self, base_system: "System"): """ # inherit the log - # TODO log.setup - self._log = base_system.log.setup(stage="data") + self._log = get_logger("base_system", {STAGE_LOG_LABEL: "data"}) self._parent = base_system @property From 079bb16200e0b3f7ce92d21685a3fa574b618b63 Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:07:06 -0700 Subject: [PATCH 024/235] Improve code for handling when risk overlay is not configured (cherry picked from commit 572512f338b8692555252ba082da596b62faf676) --- .../strategy_code/report_system_classic.py | 9 +++++---- systems/portfolio.py | 17 +++++++---------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/sysproduction/strategy_code/report_system_classic.py b/sysproduction/strategy_code/report_system_classic.py index c5d0497c8a..ab01f09db2 100644 --- a/sysproduction/strategy_code/report_system_classic.py +++ b/sysproduction/strategy_code/report_system_classic.py @@ -529,11 +529,12 @@ def risk_scaling_string(backtest) -> str: backtest_system_portfolio_stage.get_leverage_for_original_position().iloc[-1] ) percentage_vol_target = backtest_system_portfolio_stage.get_percentage_vol_target() - risk_scalar = backtest_system_portfolio_stage.get_risk_scalar() - if type(risk_scalar) is pd.Series: - risk_scalar_final = risk_scalar.iloc[-1] + try: + risk_scalar = backtest_system_portfolio_stage.get_risk_scalar() + except missingData: + risk_scalar_final = 1.0 else: - risk_scalar_final = risk_scalar + risk_scalar_final = risk_scalar.iloc[-1] risk_overlay_config = ( backtest_system_portfolio_stage.config.get_element_or_arg_not_supplied( "risk_overlay" diff --git a/systems/portfolio.py b/systems/portfolio.py index 219b2f410c..8b653a60e6 100644 --- a/systems/portfolio.py +++ b/systems/portfolio.py @@ -212,16 +212,18 @@ def get_notional_position(self, instrument_code: str) -> pd.Series: self.get_notional_position_before_risk_scaling(instrument_code) ) - risk_scalar = self.get_risk_scalar() - if type(risk_scalar) is pd.Series: + try: + risk_scalar = self.get_risk_scalar() + except missingData: + self.log.debug("No risk overlay in config: won't apply risk scaling") + notional_position = notional_position_without_risk_scalar + else: risk_scalar_reindex = risk_scalar.reindex( notional_position_without_risk_scalar.index ) notional_position = ( notional_position_without_risk_scalar * risk_scalar_reindex.ffill() ) - else: - notional_position = notional_position_without_risk_scalar return notional_position @@ -960,12 +962,7 @@ def capital_multiplier(self): @diagnostic() def get_risk_scalar(self) -> pd.Series: - risk_overlay_config = self.config.get_element_or_arg_not_supplied( - "risk_overlay" - ) - if risk_overlay_config is arg_not_supplied: - self.log.debug("No risk overlay in config: won't apply risk scaling") - return 1.0 + risk_overlay_config = self.config.get_element("risk_overlay") normal_risk = self.get_portfolio_risk_for_original_positions() shocked_vol_risk = ( From 2ca048effeafee683e676db79a36c16c4132d042 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 13 Oct 2023 12:59:12 +0100 Subject: [PATCH 025/235] refactoring away log.setup() in dataBlob --- sysdata/data_blob.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index efdc0db9df..c743bb4a0c 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -208,8 +208,7 @@ def csv_data_paths(self) -> dict: def _get_specific_logger(self, class_object): class_name = get_class_name(class_object) - # TODO log.setup - log = self.log.setup(**{COMPONENT_LOG_LABEL: class_name}) + log = get_logger(self.log.name, {COMPONENT_LOG_LABEL: class_name}) return log @@ -244,7 +243,7 @@ def _already_existing_class_name(self, attr_name: str): def _add_attr_to_list(self, new_attr: str): self._attr_list.append(new_attr) - def update_log(self, new_log: pst_logger): + def update_log(self, new_log): self._log = new_log """ From 7e5e566ed9b2d13d3ad4f68b65af32024155ea17 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 13 Oct 2023 13:00:28 +0100 Subject: [PATCH 026/235] refactoring away contract.specific_log(), which uses log.setup() --- sysproduction/update_historical_prices.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index 2347cbc832..4421353528 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -19,6 +19,7 @@ from sysdata.tools.cleaner import priceFilterConfig, get_config_for_price_filtering from syslogdiag.email_via_db_interface import send_production_mail_msg +from syslogging.logger import * from sysobjects.contracts import futuresContract from sysobjects.futures_per_contract_prices import futuresContractPrices @@ -320,8 +321,7 @@ def update_historical_prices_for_instrument( return failure for contract_object in contract_list: - # TODO specific_log - data.update_log(contract_object.specific_log(data.log)) + data.update_log(get_logger(data.log.name, **contract_object.log_attributes())) update_historical_prices_for_instrument_and_contract( contract_object, data, From 35add5533da4e0aa62a386fc2b000642371c3513 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 13 Oct 2023 13:00:49 +0100 Subject: [PATCH 027/235] updating logging TODOs --- syslogging/adapter.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/syslogging/adapter.py b/syslogging/adapter.py index ece8771f0e..8bff87ddaa 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -9,9 +9,6 @@ class DynamicAttributeLogger(logging.LoggerAdapter): """ # TODO futures_contract.specific_log - # TODO data_blob.update_log - # TODO data.update_log(contract_object.specific_log(data.log)) - # TODO data_blob._get_specific_logger # TODO log_with_attributes """ From bfc5114eb9de1ece04d949bb0b8af09adfbdf3d4 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 17 Oct 2023 14:31:14 +0100 Subject: [PATCH 028/235] safe logger setup --- systems/stage.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/systems/stage.py b/systems/stage.py index 1cbd219186..5ee02c9a9e 100644 --- a/systems/stage.py +++ b/systems/stage.py @@ -42,7 +42,10 @@ def system_init(self, system: System): @property def log(self): - return self._log + log = getattr( + self, "_log", get_logger("base_system", {STAGE_LOG_LABEL: self.name}) + ) + return log @property def parent(self) -> System: From 0f373795cbb2f18443f8f28f3b1f24a3b7ab6f84 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 17 Oct 2023 16:07:57 +0100 Subject: [PATCH 029/235] fix logger attributes --- sysproduction/update_historical_prices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index 4421353528..833b71abc7 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -321,7 +321,7 @@ def update_historical_prices_for_instrument( return failure for contract_object in contract_list: - data.update_log(get_logger(data.log.name, **contract_object.log_attributes())) + data.update_log(get_logger(data.log.name, contract_object.log_attributes())) update_historical_prices_for_instrument_and_contract( contract_object, data, From cf146e32ecc790420ac469615826b422cb73424b Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 17 Oct 2023 16:18:14 +0100 Subject: [PATCH 030/235] remove pst_logger type hints --- docs/data.md | 2 +- sysbrokers/IB/client/ib_client.py | 4 +--- sysbrokers/IB/client/ib_contracts_client.py | 2 +- sysbrokers/IB/client/ib_price_client.py | 12 ++++-------- sysbrokers/IB/config/ib_instrument_config.py | 6 +++--- sysbrokers/IB/ib_capital_data.py | 2 +- sysbrokers/broker_capital_data.py | 4 +--- syscontrol/report_process_status.py | 2 +- sysdata/data_blob.py | 4 ++-- sysexecution/algos/algo_original_best.py | 14 ++++++-------- sysexecution/orders/base_orders.py | 2 +- sysexecution/orders/broker_orders.py | 2 +- sysexecution/orders/contract_orders.py | 2 +- sysexecution/orders/instrument_orders.py | 2 +- syslogging/adapter.py | 4 ---- sysquant/optimisation/generic_optimiser.py | 2 +- systems/basesystem.py | 2 +- .../dynamic_small_system_optimise/optimisation.py | 2 +- 18 files changed, 28 insertions(+), 42 deletions(-) diff --git a/docs/data.md b/docs/data.md index e513450157..8f5c46ce1a 100644 --- a/docs/data.md +++ b/docs/data.md @@ -980,7 +980,7 @@ Here's a quick whistle-stop tour of dataBlob's other features: - you can create it with a starting class list by passing the `parameter class_list=...` -- it includes a `log` attribute that is passed to create data storage instances (you can override this by passing in a pst_logger via the `log=` parameter when dataBlob is created), the log will have top level type attribute as defined by the log_name parameter +- it includes a `log` attribute that is passed to create data storage instances (you can override this by passing in a logger via the `log=` parameter when dataBlob is created), the log will have top level type attribute as defined by the log_name parameter - when required it creates a `mongoDb` instance that is passed to create data storage instances (you can override this by passing in a `mongoDb` instance via the `mongo_db=` parameter when dataBlob is created) - when required it creates a `connectionIB` instance that is passed to create data storage instances (you can override this by passing in a connection instance via the `ib_conn=` parameter when dataBlob is created) - The parameter `csv_data_paths` will allow you to use different .csv data paths, not the defaults. The dict should have the keys of the class names, and values will be the paths to use. diff --git a/sysbrokers/IB/client/ib_client.py b/sysbrokers/IB/client/ib_client.py index 7180604c8c..b53d0ec2d2 100644 --- a/sysbrokers/IB/client/ib_client.py +++ b/sysbrokers/IB/client/ib_client.py @@ -64,9 +64,7 @@ class ibClient(object): """ - def __init__( - self, ibconnection: connectionIB, log: pst_logger = get_logger("ibClient") - ): + def __init__(self, ibconnection: connectionIB, log=get_logger("ibClient")): # means our first call won't be throttled for pacing self.last_historic_price_calltime = ( diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index c26251e408..c3793985e5 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -543,7 +543,7 @@ def _get_vanilla_ib_futures_contract( return resolved_contract - def ib_resolve_unique_contract(self, ibcontract_pattern, log: pst_logger = None): + def ib_resolve_unique_contract(self, ibcontract_pattern, log=None): """ Returns the 'resolved' IB contract based on a pattern. We expect a unique contract. diff --git a/sysbrokers/IB/client/ib_price_client.py b/sysbrokers/IB/client/ib_price_client.py index 45a4cb5580..327d2d846a 100644 --- a/sysbrokers/IB/client/ib_price_client.py +++ b/sysbrokers/IB/client/ib_price_client.py @@ -179,7 +179,7 @@ def _ib_get_recent_bid_ask_tick_data_using_reqHistoricalTicks( def _get_generic_data_for_contract( # TODO passed logger instance self, ibcontract: ibContract, - log: pst_logger = None, + log=None, bar_freq: Frequency = DAILY_PRICE_FREQ, whatToShow: str = "TRADES", ) -> pd.DataFrame: @@ -215,9 +215,7 @@ def _get_generic_data_for_contract( # TODO passed logger instance return price_data_as_df - def _raw_ib_data_to_df( - self, price_data_raw: pd.DataFrame, log: pst_logger - ) -> pd.DataFrame: + def _raw_ib_data_to_df(self, price_data_raw: pd.DataFrame, log) -> pd.DataFrame: if price_data_raw is None: log.warning("No price data from IB") @@ -272,7 +270,7 @@ def _ib_get_historical_data_of_duration_and_barSize( durationStr: str = "1 Y", barSizeSetting: str = "1 day", whatToShow="TRADES", - log: pst_logger = None, + log=None, ) -> pd.DataFrame: """ Returns historical prices for a contract, up to today @@ -345,9 +343,7 @@ def _get_barsize_and_duration_from_frequency(bar_freq: Frequency) -> (str, str): return ib_barsize, ib_duration -def _avoid_pacing_violation( - last_call_datetime: datetime.datetime, log: pst_logger = get_logger("") -): +def _avoid_pacing_violation(last_call_datetime: datetime.datetime, log=get_logger("")): printed_warning_already = False while _pause_for_pacing(last_call_datetime): if not printed_warning_already: diff --git a/sysbrokers/IB/config/ib_instrument_config.py b/sysbrokers/IB/config/ib_instrument_config.py index ee35650f27..83d3bc0bb7 100644 --- a/sysbrokers/IB/config/ib_instrument_config.py +++ b/sysbrokers/IB/config/ib_instrument_config.py @@ -22,7 +22,7 @@ class IBconfig(pd.DataFrame): ) -def read_ib_config_from_file(log: pst_logger = get_logger("")) -> IBconfig: +def read_ib_config_from_file(log=get_logger("")) -> IBconfig: try: df = pd.read_csv(IB_FUTURES_CONFIG_FILE) except Exception as e: @@ -33,7 +33,7 @@ def read_ib_config_from_file(log: pst_logger = get_logger("")) -> IBconfig: def get_instrument_object_from_config( - instrument_code: str, config: IBconfig = None, log: pst_logger = get_logger("") + instrument_code: str, config: IBconfig = None, log=get_logger("") ) -> futuresInstrumentWithIBConfigData: log_attrs = {INSTRUMENT_CODE_LOG_LABEL: instrument_code, "method": "temp"} @@ -113,7 +113,7 @@ class IBInstrumentIdentity: def get_instrument_code_from_broker_instrument_identity( config: IBconfig, ib_instrument_identity: IBInstrumentIdentity, - log: pst_logger = get_logger(""), + log=get_logger(""), ) -> str: ib_code = ib_instrument_identity.ib_code diff --git a/sysbrokers/IB/ib_capital_data.py b/sysbrokers/IB/ib_capital_data.py index 69cb9bb533..c89c819664 100644 --- a/sysbrokers/IB/ib_capital_data.py +++ b/sysbrokers/IB/ib_capital_data.py @@ -14,7 +14,7 @@ def __init__( self, ibconnection: connectionIB, data: dataBlob, - log: pst_logger = get_logger("ibCapitalData"), + log=get_logger("ibCapitalData"), ): super().__init__(log=log, data=data) self._ibconnection = ibconnection diff --git a/sysbrokers/broker_capital_data.py b/sysbrokers/broker_capital_data.py index a6c5470981..17dabeeea1 100644 --- a/sysbrokers/broker_capital_data.py +++ b/sysbrokers/broker_capital_data.py @@ -8,9 +8,7 @@ class brokerCapitalData(capitalData): - def __init__( - self, data: dataBlob, log: pst_logger = get_logger("brokerCapitalData") - ): + def __init__(self, data: dataBlob, log=get_logger("brokerCapitalData")): super().__init__(log=log) self._data = data diff --git a/syscontrol/report_process_status.py b/syscontrol/report_process_status.py index 5d7e54429d..5778b1130d 100644 --- a/syscontrol/report_process_status.py +++ b/syscontrol/report_process_status.py @@ -10,7 +10,7 @@ class reportStatus(object): ## Report on status when waiting and paused, ensures we don't spam the log - def __init__(self, log: pst_logger = arg_not_supplied): + def __init__(self, log=arg_not_supplied): if log is arg_not_supplied: log = get_logger("") self._log = log diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index c743bb4a0c..48362a4188 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -18,7 +18,7 @@ def __init__( csv_data_paths: dict = arg_not_supplied, ib_conn: connectionIB = arg_not_supplied, mongo_db: mongoDb = arg_not_supplied, - log: pst_logger = arg_not_supplied, + log=arg_not_supplied, keep_original_prefix: bool = False, ): """ @@ -40,7 +40,7 @@ def __init__( This abstracts the precise data source :param arg_string: str like a named tuple in the form 'classNameOfData1 classNameOfData2' and so on - :param log_name: pst_logger type to set + :param log_name: logger name :param keep_original_prefix: bool. If True then: data = dataBlob([arcticFuturesContractPriceData, arcticFuturesContractPriceData, mongoFuturesContractData]) diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 99c4e17a78..a6a07de6b4 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -223,7 +223,7 @@ def manage_live_trade( def limit_trade_viable( # TODO passed logger instance - data: dataBlob, order: contractOrder, ticker_object: tickerObject, log: pst_logger + data: dataBlob, order: contractOrder, ticker_object: tickerObject, log ) -> bool: # no point doing limit order if we've got imbalanced size issues, as we'd @@ -286,7 +286,7 @@ def file_log_report_limit_order( def reason_to_switch_to_aggressive( - data: dataBlob, broker_order_with_controls: orderWithControls, log: pst_logger + data: dataBlob, broker_order_with_controls: orderWithControls, log ) -> str: ticker_object = broker_order_with_controls.ticker @@ -329,7 +329,7 @@ def reason_to_switch_to_aggressive( def is_market_about_to_close( data: dataBlob, order: Union[brokerOrder, contractOrder, orderWithControls], - log: pst_logger, + log, ) -> bool: data_broker = dataBroker(data) @@ -353,7 +353,7 @@ def required_to_switch_to_aggressive(reason: str) -> bool: def adverse_size_issue( - ticker_object: tickerObject, log: pst_logger, wait_for_valid_tick=False + ticker_object: tickerObject, log, wait_for_valid_tick=False ) -> bool: if wait_for_valid_tick: current_tick_analysis = ( @@ -377,9 +377,7 @@ def adverse_size_issue( return False -def _is_imbalance_ratio_exceeded( - current_tick_analysis: analysisTick, log: pst_logger -) -> bool: +def _is_imbalance_ratio_exceeded(current_tick_analysis: analysisTick, log) -> bool: latest_imbalance_ratio = current_tick_analysis.imbalance_ratio latest_imbalance_ratio_exceeded = latest_imbalance_ratio > IMBALANCE_THRESHOLD @@ -393,7 +391,7 @@ def _is_imbalance_ratio_exceeded( def _is_insufficient_size_on_our_preferred_side( - ticker_object: tickerObject, current_tick_analysis: analysisTick, log: pst_logger + ticker_object: tickerObject, current_tick_analysis: analysisTick, log ) -> bool: abs_size_we_wish_to_trade = abs(ticker_object.qty) size_we_require_to_trade_limit = IMBALANCE_ADJ_FACTOR * abs_size_we_wish_to_trade diff --git a/sysexecution/orders/base_orders.py b/sysexecution/orders/base_orders.py index a4cfed5d0d..d897979082 100644 --- a/sysexecution/orders/base_orders.py +++ b/sysexecution/orders/base_orders.py @@ -447,7 +447,7 @@ def log_with_attributes(self, log): """ Returns a new log object with order attributes added - :param log: pst_logger + :param log: logger :return: log """ diff --git a/sysexecution/orders/broker_orders.py b/sysexecution/orders/broker_orders.py index 23cf282639..324c2a8f05 100644 --- a/sysexecution/orders/broker_orders.py +++ b/sysexecution/orders/broker_orders.py @@ -341,7 +341,7 @@ def log_with_attributes(self, log): """ Returns a new log object with broker_order attributes added - :param log: pst_logger + :param log: logger :return: log """ broker_order = self diff --git a/sysexecution/orders/contract_orders.py b/sysexecution/orders/contract_orders.py index b7ded12780..675095486a 100644 --- a/sysexecution/orders/contract_orders.py +++ b/sysexecution/orders/contract_orders.py @@ -284,7 +284,7 @@ def log_with_attributes(self, log): """ Returns a new log object with contract_order attributes added - :param log: pst_logger + :param log: logger :return: log """ new_log = log.setup( diff --git a/sysexecution/orders/instrument_orders.py b/sysexecution/orders/instrument_orders.py index 11f35481f8..6f3539424e 100644 --- a/sysexecution/orders/instrument_orders.py +++ b/sysexecution/orders/instrument_orders.py @@ -232,7 +232,7 @@ def log_with_attributes(self, log): """ Returns a new log object with instrument_order attributes added - :param log: pst_logger + :param log: logger :return: log """ new_log = log.setup( diff --git a/syslogging/adapter.py b/syslogging/adapter.py index 8bff87ddaa..0bc60ddd9f 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -82,7 +82,3 @@ def _check_attributes(self, attributes: dict): raise Exception( "Attributes %s not allowed in log" % str(bad_attributes) ) - - -class pst_logger(DynamicAttributeLogger): - pass diff --git a/sysquant/optimisation/generic_optimiser.py b/sysquant/optimisation/generic_optimiser.py index acaf06cfe5..9393ec123f 100644 --- a/sysquant/optimisation/generic_optimiser.py +++ b/sysquant/optimisation/generic_optimiser.py @@ -28,7 +28,7 @@ def net_returns(self) -> returnsForOptimisation: return self._net_returns @property - def log(self) -> pst_logger: + def log(self): return self._log @property diff --git a/systems/basesystem.py b/systems/basesystem.py index 302b9815e1..61489c6e90 100644 --- a/systems/basesystem.py +++ b/systems/basesystem.py @@ -44,7 +44,7 @@ def __init__( stage_list: list, data: simData, config: Config = arg_not_supplied, - log: pst_logger = get_logger("base_system"), + log=get_logger("base_system"), ): """ Create a system object for doing simulations or live trading diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index 6ea008e32a..c9e00cfec4 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -39,7 +39,7 @@ def __init__( previous_positions: portfolioWeights = arg_not_supplied, constraints: constraintsForDynamicOpt = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, - log: pst_logger = get_logger("objectiveFunctionForGreedy"), + log=get_logger("objectiveFunctionForGreedy"), ): self.covariance_matrix = covariance_matrix From 99ed02a2269d85567f92afb225898ec32bd906d9 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 17 Oct 2023 22:11:32 +0100 Subject: [PATCH 031/235] better logging for submit_order() --- sysexecution/strategies/strategy_order_handling.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/sysexecution/strategies/strategy_order_handling.py b/sysexecution/strategies/strategy_order_handling.py index b3746cc98c..c667ea532e 100644 --- a/sysexecution/strategies/strategy_order_handling.py +++ b/sysexecution/strategies/strategy_order_handling.py @@ -11,7 +11,7 @@ from sysexecution.orders.list_of_orders import listOfOrders from sysexecution.orders.instrument_orders import instrumentOrder from sysexecution.order_stacks.instrument_order_stack import zeroOrderException - +from syslogging.logger import * from sysproduction.data.positions import diagPositions from sysproduction.data.orders import dataOrders from sysproduction.data.controls import diagOverrides, dataLocks, dataPositionLimits @@ -190,11 +190,14 @@ def submit_order(self, order: instrumentOrder): try: order_id = self.order_stack.put_order_on_stack(order) + log_attrs[INSTRUMENT_ORDER_ID_LABEL] = order_id except zeroOrderException: - # we checked for zero already, which means that there is an existing order on the stack + # we checked for zero already, which means that there is an existing order + # on the stack # An existing order of the same size self.log.warning( - "Ignoring new order as either zero size or it replicates an existing order on the stack", + "Ignoring new order as either zero size or it replicates an existing " + "order on the stack", **log_attrs, ) @@ -202,6 +205,5 @@ def submit_order(self, order: instrumentOrder): self.log.debug( "Added order %s to instrument order stack with order id %d" % (str(order), order_id), - instrument_order_id=order_id, **log_attrs, ) From 24a9c0fdbf3b707c4159d5849c097f3bbd05384a Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 18 Oct 2023 12:05:18 +0100 Subject: [PATCH 032/235] add missing dict expanders ** --- sysbrokers/IB/client/ib_contracts_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index c3793985e5..d78e6139e0 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -337,7 +337,7 @@ def ib_get_contract_details(self, contract_object_with_ib_data: futuresContract) except missingContract: self.log.warning( "Can't get trading hours as contract is missing", - contract_object_with_ib_data.log_attributes(), + **contract_object_with_ib_data.log_attributes(), method="temp", ) raise From de693e5020b33cf2a595ccd56b28bad7cf48fe9c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 18 Oct 2023 12:05:59 +0100 Subject: [PATCH 033/235] fix temp fx log attributes --- sysbrokers/IB/ib_Fx_prices_data.py | 6 ++---- sysdata/arctic/arctic_spotfx_prices.py | 6 ++---- sysdata/fx/spotfx.py | 6 ++---- 3 files changed, 6 insertions(+), 12 deletions(-) diff --git a/sysbrokers/IB/ib_Fx_prices_data.py b/sysbrokers/IB/ib_Fx_prices_data.py index f252faf1c4..b8339dac49 100644 --- a/sysbrokers/IB/ib_Fx_prices_data.py +++ b/sysbrokers/IB/ib_Fx_prices_data.py @@ -54,8 +54,7 @@ def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: except missingInstrument: self.log.warning( "Can't get prices as missing IB config for %s" % currency_code, - CURRENCY_CODE_LOG_LABEL=currency_code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, ) return fxPrices.create_empty() @@ -115,8 +114,7 @@ def _get_config_info_for_code(self, currency_code: str) -> ibFXConfig: except missingFile as e: self.log.warning( "Can't get IB FX config for %s as config file missing" % currency_code, - CURRENCY_CODE_LOG_LABEL=currency_code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, ) raise missingInstrument from e diff --git a/sysdata/arctic/arctic_spotfx_prices.py b/sysdata/arctic/arctic_spotfx_prices.py index 4eb56b730d..57b92a518c 100644 --- a/sysdata/arctic/arctic_spotfx_prices.py +++ b/sysdata/arctic/arctic_spotfx_prices.py @@ -39,8 +39,7 @@ def _delete_fx_prices_without_any_warning_be_careful(self, currency_code: str): self.arctic.delete(currency_code) self.log.debug( "Deleted fX prices for %s from %s" % (currency_code, str(self)), - CURRENCY_CODE_LOG_LABEL=currency_code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, ) def _add_fx_prices_without_checking_for_existing_entry( @@ -54,6 +53,5 @@ def _add_fx_prices_without_checking_for_existing_entry( self.log.debug( "Wrote %s lines of prices for %s to %s" % (len(fx_price_data), currency_code, str(self)), - CURRENCY_CODE_LOG_LABEL=currency_code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, ) diff --git a/sysdata/fx/spotfx.py b/sysdata/fx/spotfx.py index d949dddc79..e52ea2b5af 100644 --- a/sysdata/fx/spotfx.py +++ b/sysdata/fx/spotfx.py @@ -86,8 +86,7 @@ def _get_fx_prices_for_inversion(self, fx_code: str) -> fxPrices: self.log.warning( "Data for %s is missing, needed to calculate %s" % (currency2 + DEFAULT_CURRENCY, DEFAULT_CURRENCY + currency2), - CURRENCY_CODE_LOG_LABEL=fx_code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: fx_code, "method": "temp"}, ) return raw_fx_data @@ -132,8 +131,7 @@ def _get_fx_prices(self, code: str) -> fxPrices: if not self.is_code_in_data(code): self.log.warning( "Currency %s is missing from list of FX data" % code, - CURRENCY_CODE_LOG_LABEL=code, - method="temp", + **{CURRENCY_CODE_LOG_LABEL: code, "method": "temp"}, ) return fxPrices.create_empty() From 04027f0c882f7d03cc64a07860861c259aa0365e Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 18 Oct 2023 13:39:37 +0100 Subject: [PATCH 034/235] adding more tests for temp method --- syslogging/tests/logging_tests.py | 64 +++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index eebf7116b7..6023b6e8a4 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -1,5 +1,6 @@ import pytest from syslogging.logger import * +from sysobjects.contracts import futuresContract class TestLogging: @@ -78,6 +79,69 @@ def test_attributes_temp(self, caplog): "{'stage': 'first'} no type attribute", ) + def test_contract_log_attributes(self, caplog): + contract_logger = get_logger("contract") + contract = futuresContract( + instrument_object="AUD", contract_date_object="20231200" + ) + log_attrs = contract.log_attributes() + contract_logger.info( + "setting temp contract attributes", **log_attrs, method="temp" + ) + assert caplog.record_tuples[0] == ( + "contract", + logging.INFO, + "{'instrument_code': 'AUD', 'contract_date': '20231200'} setting temp " + "contract attributes", + ) + contract_logger.info("no contract attributes") + assert caplog.record_tuples[1] == ( + "contract", + logging.INFO, + "no contract attributes", + ) + + def test_contract_log_attributes_inline(self, caplog): + contract_inline = get_logger("contract_inline") + contract = futuresContract( + instrument_object="AUD", contract_date_object="20231200" + ) + contract_inline.info( + "setting temp contract attributes inline", + **contract.log_attributes(), + method="temp", + ) + assert caplog.record_tuples[0] == ( + "contract_inline", + logging.INFO, + "{'instrument_code': 'AUD', 'contract_date': '20231200'} setting temp " + "contract attributes inline", + ) + contract_inline.info("no contract attributes") + assert caplog.record_tuples[1] == ( + "contract_inline", + logging.INFO, + "no contract attributes", + ) + + def test_fx_log_attributes(self, caplog): + fx = get_logger("fx") + fx.info( + "setting temp fx attributes inline", + **{CURRENCY_CODE_LOG_LABEL: "USDAUD", "method": "temp"}, + ) + assert caplog.record_tuples[0] == ( + "fx", + logging.INFO, + "{'currency_code': 'USDAUD'} setting temp fx attributes inline", + ) + fx.info("no contract attributes") + assert caplog.record_tuples[1] == ( + "fx", + logging.INFO, + "no contract attributes", + ) + def test_setup(self): logger = get_logger("my_type", {"stage": "bar"}) logger = logger.setup(stage="left") From 346bdd54a1da6589bb3e212750b21f5228b3c139 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 18 Oct 2023 13:39:52 +0100 Subject: [PATCH 035/235] expanding log attributes --- sysbrokers/IB/client/ib_contracts_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index d78e6139e0..096b36a98c 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -295,7 +295,7 @@ def ib_get_min_tick_size( self.log.warning( "%s when getting min tick size from %s!" % (str(e), str(ib_contract_details)), - log_attrs, + **log_attrs, ) raise missingContract From 737643dbd17e14ca0d5ec4bc31c80e2707143215 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 18 Oct 2023 13:40:16 +0100 Subject: [PATCH 036/235] fixing duplicated log attribute --- sysproduction/update_sampled_contracts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sysproduction/update_sampled_contracts.py b/sysproduction/update_sampled_contracts.py index e60631eb40..2f50d90bd2 100644 --- a/sysproduction/update_sampled_contracts.py +++ b/sysproduction/update_sampled_contracts.py @@ -386,7 +386,6 @@ def update_expiry_and_sampling_status_for_contract( data.log.debug( "Contract %s %s so now stopped sampling" % (str(contract_object), unsample_reason), - contract_date=contract_object.date_str, **log_attrs, ) From c7311d879da5d04d2652123af1bee78b9f49f932 Mon Sep 17 00:00:00 2001 From: rob Date: Wed, 18 Oct 2023 15:36:13 +0100 Subject: [PATCH 037/235] fix #1269 --- sysdata/config/defaults.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/config/defaults.yaml b/sysdata/config/defaults.yaml index 420899532c..9e24a8d990 100644 --- a/sysdata/config/defaults.yaml +++ b/sysdata/config/defaults.yaml @@ -21,7 +21,7 @@ strategy_capital_allocation: # example: 100.0 # ## Where do we save backtests -backtest_store_directory: 'private' +backtest_store_directory: 'private.backtests' # # And backups csv_backup_directory: 'data.backups_csv' From ffd28017d210bd134e8fe7105ee709fdf9438dbf Mon Sep 17 00:00:00 2001 From: rob Date: Wed, 18 Oct 2023 15:45:08 +0100 Subject: [PATCH 038/235] fix #1270 --- sysobjects/contract_dates_and_expiries.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysobjects/contract_dates_and_expiries.py b/sysobjects/contract_dates_and_expiries.py index 1c7f3f951d..5da915a995 100644 --- a/sysobjects/contract_dates_and_expiries.py +++ b/sysobjects/contract_dates_and_expiries.py @@ -49,10 +49,10 @@ def from_str(expiryDate, date_as_str: str): as_date = datetime.datetime.strptime(date_as_str, EXPIRY_DATE_FORMAT) except: raise Exception( - "Expiry date %s not in format %s" % date_as_str, EXPIRY_DATE_FORMAT + "Expiry date %s not in format %s" % (date_as_str, EXPIRY_DATE_FORMAT) ) - return expiryDate(as_date.year, as_date.month, as_date.day) + return expiryDate(year=as_date.year, month=as_date.month, day=as_date.day) def as_str(self) -> str: return self.strftime(EXPIRY_DATE_FORMAT) From f688034b12b4cc7041c477eaf328c18f3f04be50 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 19 Oct 2023 09:54:30 +0100 Subject: [PATCH 039/235] removing risk_overlay config from example tests, except where explicitly needed --- tests/test_examples.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index ebe9821be6..290c5862c6 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -51,7 +51,6 @@ def my_config(ewmac_8, ewmac_32): my_config.trading_rules = dict(ewmac8=ewmac_8, ewmac32=ewmac_32) my_config.instruments = ["US10", "EDOLLAR", "CORN", "SP500"] my_config.notional_trading_capital = 1000000 - my_config.risk_overlay = arg_not_supplied my_config.exclude_instrument_lists = dict( ignore_instruments=["MILK"], trading_restrictions=["BUTTER"], @@ -309,7 +308,6 @@ def test_simple_system_config_object(self, data, ewmac_8, ewmac_32): percentage_vol_target=25.00, notional_trading_capital=500000, base_currency="GBP", - risk_overlay=arg_not_supplied, exclude_instrument_lists=dict( ignore_instruments=["MILK"], trading_restrictions=["BUTTER"], @@ -379,7 +377,6 @@ def test_simple_system_risk_overlay(self, data, ewmac_8, ewmac_32): def test_simple_system_config_import(self, data): my_config = Config("systems.provided.example.simplesystemconfig.yaml") - my_config.risk_overlay = arg_not_supplied my_config.exclude_instrument_lists = dict( ignore_instruments=["MILK"], trading_restrictions=["BUTTER"], @@ -418,7 +415,6 @@ def test_prebaked_simple_system(self): This is the simple system from 'examples.introduction.prebakedsimplesystems' """ my_system = simplesystem() - my_system.config.risk_overlay = arg_not_supplied print(my_system) print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) @@ -428,7 +424,6 @@ def test_prebaked_from_confg(self): This is the config system from 'examples.introduction.prebakedsimplesystems' """ my_config = Config("systems.provided.example.simplesystemconfig.yaml") - my_config.risk_overlay = arg_not_supplied my_data = csvFuturesSimData() my_system = simplesystem(config=my_config, data=my_data) print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) @@ -440,7 +435,6 @@ def test_prebaked_chapter15(self): but without graph plotting """ system = base_futures_system() - system.config.risk_overlay = arg_not_supplied print(system.accounts.portfolio().sharpe()) @staticmethod From 79bf044590bcf364a508abb19d2c0d9d61867aa2 Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Thu, 19 Oct 2023 12:43:20 -0700 Subject: [PATCH 040/235] Handle edge case when rounding (cherry picked from commit 316abf868fb50eadd874b481675143f0c563ca43) --- syscore/genutils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/syscore/genutils.py b/syscore/genutils.py index 31adbc660d..2da06f07d1 100755 --- a/syscore/genutils.py +++ b/syscore/genutils.py @@ -86,6 +86,9 @@ def round_significant_figures(x: float, figures: int = 3) -> float: >>> round_significant_figures(0.0234, 2) 0.023 """ + if x == 0: + return 0.0 + return round(x, figures - int(math.floor(math.log10(abs(x)))) - 1) From ec1c616cf77247d29eb22271c8db2709f6e92439 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 20 Oct 2023 08:47:15 +0100 Subject: [PATCH 041/235] refactoring away contract.log(), which uses log.setup() --- sysbrokers/IB/ib_futures_contract_price_data.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/sysbrokers/IB/ib_futures_contract_price_data.py b/sysbrokers/IB/ib_futures_contract_price_data.py index dabefa80bd..dbfc2e3e04 100644 --- a/sysbrokers/IB/ib_futures_contract_price_data.py +++ b/sysbrokers/IB/ib_futures_contract_price_data.py @@ -231,7 +231,10 @@ def _get_prices_at_frequency_for_ibcontract_object_no_checking( allow_expired: bool = False, ) -> futuresContractPrices: - new_log = contract_object_with_ib_broker_config.log(self.log) + log_attrs = { + **contract_object_with_ib_broker_config.log_attributes(), + "method": "temp", + } try: price_data = self.ib_client.broker_get_historical_futures_data_for_contract( @@ -240,16 +243,18 @@ def _get_prices_at_frequency_for_ibcontract_object_no_checking( allow_expired=allow_expired, ) except missingData: - new_log.warning( + self.log.warning( "Something went wrong getting IB price data for %s" - % str(contract_object_with_ib_broker_config) + % str(contract_object_with_ib_broker_config), + **log_attrs, ) raise if len(price_data) == 0: - new_log.warning( + self.log.warning( "No IB price data found for %s" - % str(contract_object_with_ib_broker_config) + % str(contract_object_with_ib_broker_config), + **log_attrs, ) return futuresContractPrices.create_empty() From 20c6e7219d8e06af7c32672d55eefaefdfc328a6 Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Fri, 20 Oct 2023 09:42:12 -0700 Subject: [PATCH 042/235] Update some spread costs --- data/futures/csvconfig/spreadcosts.csv | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/data/futures/csvconfig/spreadcosts.csv b/data/futures/csvconfig/spreadcosts.csv index 94acff1915..0902a6aae7 100644 --- a/data/futures/csvconfig/spreadcosts.csv +++ b/data/futures/csvconfig/spreadcosts.csv @@ -54,7 +54,7 @@ CAN-FINANCE,0 CAN-GOLD,0 CAN-TECH,0 CANNABIS_small,0 -CANOLA,2 +CANOLA,1.2 CH10,0.25 CHEESE,0.0059 CHF,5.8E-05 @@ -73,7 +73,7 @@ CNHEUR,0 COAL,0 COAL-GEORDIE,0 COAL-RICH-BAY,0 -COCOA,0.5 +COCOA,1.0 COCOA_LDN,0 COFFEE,0.1 COPPER,0.00043 @@ -86,7 +86,7 @@ CORN-JPN,0 CORN_mini,0.2 CORRA,0.008 COTTON,0.75 -COTTON2,0.078 +COTTON2,0.054 CRUDE_ICE,0 CRUDE_W,0.012 CRUDE_W_micro,0.017 From 3a9ee84ea02dc2e35fff41b3c6ed1804ccce6b9c Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Fri, 20 Oct 2023 09:43:58 -0700 Subject: [PATCH 043/235] Revert "Update some spread costs" This reverts commit 20c6e7219d8e06af7c32672d55eefaefdfc328a6. --- data/futures/csvconfig/spreadcosts.csv | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/data/futures/csvconfig/spreadcosts.csv b/data/futures/csvconfig/spreadcosts.csv index 0902a6aae7..94acff1915 100644 --- a/data/futures/csvconfig/spreadcosts.csv +++ b/data/futures/csvconfig/spreadcosts.csv @@ -54,7 +54,7 @@ CAN-FINANCE,0 CAN-GOLD,0 CAN-TECH,0 CANNABIS_small,0 -CANOLA,1.2 +CANOLA,2 CH10,0.25 CHEESE,0.0059 CHF,5.8E-05 @@ -73,7 +73,7 @@ CNHEUR,0 COAL,0 COAL-GEORDIE,0 COAL-RICH-BAY,0 -COCOA,1.0 +COCOA,0.5 COCOA_LDN,0 COFFEE,0.1 COPPER,0.00043 @@ -86,7 +86,7 @@ CORN-JPN,0 CORN_mini,0.2 CORRA,0.008 COTTON,0.75 -COTTON2,0.054 +COTTON2,0.078 CRUDE_ICE,0 CRUDE_W,0.012 CRUDE_W_micro,0.017 From cffecf45962bbb235589ca2f48f26e89f4f2434e Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 24 Oct 2023 16:06:39 +0100 Subject: [PATCH 044/235] log twice twice --- sysbrokers/IB/client/ib_contracts_client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index 096b36a98c..017b3ab118 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -235,8 +235,7 @@ def ib_get_saved_weekly_trading_hours_for_timezone_of_contract( "Check ib_config_trading_hours in sysbrokers/IB or private directory, hours for timezone %s not found!" % time_zone_id ) - # TODO check this double log - self.log.log.critical(error_msg, **log_attrs) + self.log.critical(error_msg, **log_attrs) raise missingData return weekly_hours_for_timezone From 3e11363572cf7206b92f17194291eb2e8ea2d0ab Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 1 Nov 2023 23:33:22 +0000 Subject: [PATCH 045/235] better string comparison: == not is --- sysexecution/order_stacks/contract_order_stack.py | 2 +- sysexecution/order_stacks/instrument_order_stack.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysexecution/order_stacks/contract_order_stack.py b/sysexecution/order_stacks/contract_order_stack.py index 8573c6505d..77d27d1e3f 100644 --- a/sysexecution/order_stacks/contract_order_stack.py +++ b/sysexecution/order_stacks/contract_order_stack.py @@ -87,7 +87,7 @@ def list_of_orders_with_instrument_code(self, instrument_code: str) -> list: list_of_orders = [ order for order in list_of_orders - if order.instrument_code is instrument_code + if order.instrument_code == instrument_code ] return list_of_orders diff --git a/sysexecution/order_stacks/instrument_order_stack.py b/sysexecution/order_stacks/instrument_order_stack.py index 7cb007c54d..0b44ff7255 100644 --- a/sysexecution/order_stacks/instrument_order_stack.py +++ b/sysexecution/order_stacks/instrument_order_stack.py @@ -19,7 +19,7 @@ def list_of_orders_with_instrument_code(self, instrument_code: str) -> list: list_of_orders = [ order for order in list_of_orders - if order.instrument_code is instrument_code + if order.instrument_code == instrument_code ] return list_of_orders From 51ed07a730372d498dfd8df1d3589bba760ef5ca Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 2 Nov 2023 13:06:09 +0000 Subject: [PATCH 046/235] reduce excessive log messages when no contract order spawned --- .../spawn_children_from_instrument_orders.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py index 54db138d70..968ef1a0c3 100644 --- a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py +++ b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py @@ -64,15 +64,18 @@ def spawn_children_from_instrument_order_id(self, instrument_order_id: int): self.data, instrument_order ) - log = instrument_order.log_with_attributes(self.log) - log.debug("List of contract orders spawned %s" % str(list_of_contract_orders)) + if len(list_of_contract_orders) > 0: + log = instrument_order.log_with_attributes(self.log) + log.debug( + "List of contract orders spawned %s" % str(list_of_contract_orders) + ) - self.add_children_to_stack_and_child_id_to_parent( - self.instrument_stack, - self.contract_stack, - instrument_order, - list_of_contract_orders, - ) + self.add_children_to_stack_and_child_id_to_parent( + self.instrument_stack, + self.contract_stack, + instrument_order, + list_of_contract_orders, + ) def add_children_to_stack_and_child_id_to_parent( self, From abb1fb40fdda6937e2572dd4cd2e15620ab3b1d2 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 2 Nov 2023 14:44:13 +0000 Subject: [PATCH 047/235] reduce excessive logging on dataBroker() creation --- sysexecution/algos/algo_original_best.py | 27 ++++++++++++++---------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 1b8a02646c..b2ef12f456 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -111,7 +111,7 @@ def prepare_and_submit_trade(self) -> orderWithControls: try: okay_to_do_limit_trade = limit_trade_viable( ticker_object=ticker_object, - data=data, + data_broker=self.data_broker, order=cut_down_contract_order, log=log, ) @@ -151,7 +151,6 @@ def manage_live_trade( log = broker_order_with_controls_and_order_id.order.log_with_attributes( data.log ) - data_broker = dataBroker(data) trade_open = True is_aggressive = False @@ -181,7 +180,7 @@ def manage_live_trade( else: # passive limit trade reason_to_switch = reason_to_switch_to_aggressive( - data=data, + data_broker=self.data_broker, broker_order_with_controls=broker_order_with_controls_and_order_id, log=log, ) @@ -210,8 +209,10 @@ def manage_live_trade( ) break - order_cancelled = data_broker.check_order_is_cancelled_given_control_object( - broker_order_with_controls_and_order_id + order_cancelled = ( + self.data_broker.check_order_is_cancelled_given_control_object( + broker_order_with_controls_and_order_id + ) ) if order_cancelled: log.warning("Order has been cancelled: not by algo") @@ -221,7 +222,10 @@ def manage_live_trade( def limit_trade_viable( - data: dataBlob, order: contractOrder, ticker_object: tickerObject, log: pst_logger + data_broker: dataBroker, + order: contractOrder, + ticker_object: tickerObject, + log: pst_logger, ) -> bool: # no point doing limit order if we've got imbalanced size issues, as we'd @@ -235,7 +239,7 @@ def limit_trade_viable( return False # or if not enough time left - if is_market_about_to_close(data, order=order, log=log): + if is_market_about_to_close(data_broker=data_broker, order=order, log=log): log.debug( "Market about to close or stack handler nearly close - doing market order" @@ -284,7 +288,9 @@ def file_log_report_limit_order( def reason_to_switch_to_aggressive( - data: dataBlob, broker_order_with_controls: orderWithControls, log: pst_logger + data_broker: dataBroker, + broker_order_with_controls: orderWithControls, + log: pst_logger, ) -> str: ticker_object = broker_order_with_controls.ticker @@ -298,7 +304,7 @@ def reason_to_switch_to_aggressive( ) market_about_to_close = is_market_about_to_close( - data=data, order=broker_order_with_controls, log=log + data_broker=data_broker, order=broker_order_with_controls, log=log ) if market_about_to_close: return "Market is closing soon or stack handler will end soon" @@ -325,11 +331,10 @@ def reason_to_switch_to_aggressive( def is_market_about_to_close( - data: dataBlob, + data_broker: dataBroker, order: Union[brokerOrder, contractOrder, orderWithControls], log: pst_logger, ) -> bool: - data_broker = dataBroker(data) try: short_of_time = data_broker.less_than_N_hours_of_trading_left_for_contract( From 7a95cce9c2f546166fcab7789c1750b6c22e6a22 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 2 Nov 2023 14:45:00 +0000 Subject: [PATCH 048/235] add minimal sleep() to wait loops, giving the GIL some breathing space --- sysexecution/algos/algo_market.py | 2 ++ sysexecution/algos/algo_original_best.py | 3 ++- sysexecution/algos/common_functions.py | 3 ++- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/sysexecution/algos/algo_market.py b/sysexecution/algos/algo_market.py index 6197b6ac5c..41894019d8 100644 --- a/sysexecution/algos/algo_market.py +++ b/sysexecution/algos/algo_market.py @@ -2,6 +2,7 @@ Simplest possible execution method, one market order """ from copy import copy +import time from sysexecution.orders.named_order_objects import missing_order from sysexecution.algos.algo import Algo @@ -86,6 +87,7 @@ def manage_live_trade( % str(broker_order_with_controls.order) ) while trade_open: + time.sleep(0.001) log_message_required = broker_order_with_controls.message_required( messaging_frequency_seconds=MESSAGING_FREQUENCY ) diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index b2ef12f456..79fd985f29 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -2,7 +2,7 @@ This is the original 'best execution' algo I used in my legacy system """ from typing import Union - +import time from syscore.exceptions import missingData, marketClosed from sysexecution.orders.named_order_objects import missing_order @@ -164,6 +164,7 @@ def manage_live_trade( ) while trade_open: + time.sleep(0.001) if broker_order_with_controls_and_order_id.message_required( messaging_frequency_seconds=MESSAGING_FREQUENCY ): diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index 513a1987af..39edaf8315 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -1,5 +1,5 @@ # functions used by multiple algos - +import time from syscore.exceptions import orderCannotBeModified from sysdata.data_blob import dataBlob from sysproduction.data.broker import dataBroker @@ -43,6 +43,7 @@ def cancel_order( timer = quickTimer(seconds=CANCEL_WAIT_TIME) not_cancelled = True while not_cancelled: + time.sleep(0.001) is_cancelled = data_broker.check_order_is_cancelled_given_control_object( broker_order_with_controls ) From f8cbad7a6adffccd73930be11fb16784f8b1f3ca Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 3 Nov 2023 13:58:33 +0000 Subject: [PATCH 049/235] reduce excessive logging in 'trade_open' loop --- sysexecution/algos/algo_original_best.py | 7 ++++--- sysexecution/algos/common_functions.py | 5 ++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 79fd985f29..1256f098f8 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -176,7 +176,8 @@ def manage_live_trade( if is_aggressive: ## aggressive keep limit price in line set_aggressive_limit_price( - data, broker_order_with_controls_and_order_id + data_broker=self.data_broker, + broker_order_with_controls=broker_order_with_controls_and_order_id, ) else: # passive limit trade @@ -423,7 +424,7 @@ def _is_insufficient_size_on_our_preferred_side( def set_aggressive_limit_price( - data: dataBlob, broker_order_with_controls: orderWithControls + data_broker: dataBroker, broker_order_with_controls: orderWithControls ) -> orderWithControls: limit_trade = broker_order_with_controls.order.order_type == limit_order_type if not limit_trade: @@ -437,7 +438,7 @@ def set_aggressive_limit_price( pass else: broker_order_with_controls = set_limit_price( - data, broker_order_with_controls, new_limit_price + data_broker, broker_order_with_controls, new_limit_price ) return broker_order_with_controls diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index 39edaf8315..a8411952d4 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -58,13 +58,12 @@ def cancel_order( def set_limit_price( - data: dataBlob, + data_broker: dataBroker, broker_order_with_controls: orderWithControls, new_limit_price: float, ): - log = broker_order_with_controls.order.log_with_attributes(data.log) - data_broker = dataBroker(data) + log = broker_order_with_controls.order.log_with_attributes(data_broker.data.log) try: broker_order_with_controls = ( From 7dfea17294e0ad8228aa52a58e7c41e2ab9ce5cb Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 6 Nov 2023 13:01:52 +0000 Subject: [PATCH 050/235] reduce excessive logging in 'trade_open' loop --- sysproduction/data/broker.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/sysproduction/data/broker.py b/sysproduction/data/broker.py index a1c553d0f8..b4a49af3f3 100644 --- a/sysproduction/data/broker.py +++ b/sysproduction/data/broker.py @@ -45,6 +45,10 @@ class dataBroker(productionDataLayerGeneric): + def __init__(self, data: dataBlob = arg_not_supplied): + super().__init__(data) + self._diag_controls = diagControlProcess() + def _add_required_classes_to_data(self, data) -> dataBlob: # Add a list of broker specific classes that will be aliased as self.data.broker_fx_prices, @@ -90,6 +94,10 @@ def broker_fx_handling_data(self) -> brokerFxHandlingData: def broker_static_data(self) -> brokerStaticData: return self.data.broker_static + @property + def diag_controls(self) -> diagControlProcess: + return self._diag_controls + ## Methods def get_list_of_contract_dates_for_instrument_code( @@ -191,9 +199,8 @@ def less_than_N_hours_of_trading_left_for_contract( self, contract: futuresContract, N_hours: float = 1.0 ) -> bool: - diag_controls = diagControlProcess() hours_left_before_process_finishes = ( - diag_controls.how_long_in_hours_before_trading_process_finishes() + self.diag_controls.how_long_in_hours_before_trading_process_finishes() ) if hours_left_before_process_finishes < N_hours: From f2dbe3133b1a1a649489a6ddd47216b654617081 Mon Sep 17 00:00:00 2001 From: rob Date: Mon, 6 Nov 2023 16:29:25 +0000 Subject: [PATCH 051/235] can use long only constraint in dynamic optimisation now --- .../data_for_optimisation.py | 1 + .../dynamic_small_system_optimise/optimisation.py | 2 ++ .../set_up_constraints.py | 13 ++++++++++++- 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py b/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py index 156e0ac560..be561cdf40 100644 --- a/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py @@ -15,6 +15,7 @@ def __init__(self, obj_instance: "objectiveFunctionForGreedy"): self.weights_optimal = obj_instance.weights_optimal self.per_contract_value = obj_instance.per_contract_value self.costs = obj_instance.costs + self.long_only = obj_instance.long_only if obj_instance.constraints is arg_not_supplied: reduce_only_keys = no_trade_keys = arg_not_supplied diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index 6ea008e32a..ba32e217fb 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -39,6 +39,7 @@ def __init__( previous_positions: portfolioWeights = arg_not_supplied, constraints: constraintsForDynamicOpt = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, + long_only: list = arg_not_supplied, log: pst_logger = get_logger("objectiveFunctionForGreedy"), ): @@ -53,6 +54,7 @@ def __init__( self.weights_optimal = weights_optimal self.contracts_optimal = contracts_optimal + self.long_only = long_only if previous_positions is arg_not_supplied: weights_prior = arg_not_supplied diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index 25bba896bd..56a7bb218f 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -72,6 +72,10 @@ def get_data_and_calculate_for_code( no_trade = False else: no_trade = instrument_code in input_data.no_trade_keys + if input_data.long_only is arg_not_supplied: + long_only = False + else: + long_only = instrument_code in input_data.long_only max_position = input_data.maximum_position_weight_for_code(instrument_code) weight_prior = input_data.prior_weight_for_code(instrument_code) @@ -83,6 +87,7 @@ def get_data_and_calculate_for_code( max_position=max_position, weight_prior=weight_prior, optimium_weight=optimium_weight, + long_only=long_only, ) return min_max_and_direction_and_start_for_code @@ -94,6 +99,7 @@ def calculations_for_code( max_position: float = arg_not_supplied, weight_prior: float = arg_not_supplied, optimium_weight: float = np.nan, + long_only: bool = False, ): minimum, maximum = calculate_minima_and_maxima( @@ -101,6 +107,7 @@ def calculations_for_code( no_trade=no_trade, max_position=max_position, weight_prior=weight_prior, + long_only=long_only, ) assert maximum >= minimum @@ -118,6 +125,7 @@ def calculations_for_code( def calculate_minima_and_maxima( reduce_only: bool = False, + long_only: bool = False, no_trade: bool = False, max_position: float = arg_not_supplied, weight_prior: float = arg_not_supplied, @@ -126,6 +134,9 @@ def calculate_minima_and_maxima( minimum = -A_VERY_LARGE_NUMBER maximum = A_VERY_LARGE_NUMBER + if long_only: + minimum = 0.0 + if no_trade: if weight_prior is not arg_not_supplied: return weight_prior, weight_prior @@ -136,7 +147,7 @@ def calculate_minima_and_maxima( minimum = 0.0 maximum = weight_prior elif weight_prior < 0: - minimum = weight_prior + minimum = max(minimum, weight_prior) maximum = 0.0 else: From 9397726d6d35ec9146fc69db8c629e253126ae24 Mon Sep 17 00:00:00 2001 From: rob Date: Mon, 6 Nov 2023 17:10:18 +0000 Subject: [PATCH 052/235] can use long only constraint in dynamic optimisation now --- .../dynamic_small_system_optimise/data_for_optimisation.py | 5 +++-- .../provided/dynamic_small_system_optimise/optimisation.py | 2 +- .../dynamic_small_system_optimise/set_up_constraints.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py b/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py index be561cdf40..b02a7fc706 100644 --- a/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/data_for_optimisation.py @@ -15,17 +15,18 @@ def __init__(self, obj_instance: "objectiveFunctionForGreedy"): self.weights_optimal = obj_instance.weights_optimal self.per_contract_value = obj_instance.per_contract_value self.costs = obj_instance.costs - self.long_only = obj_instance.long_only if obj_instance.constraints is arg_not_supplied: - reduce_only_keys = no_trade_keys = arg_not_supplied + long_only_keys = reduce_only_keys = no_trade_keys = arg_not_supplied else: no_trade_keys = obj_instance.constraints.no_trade_keys reduce_only_keys = obj_instance.constraints.reduce_only_keys + long_only_keys = obj_instance.constraints.long_only_keys self.no_trade_keys = no_trade_keys self.reduce_only_keys = reduce_only_keys + self.long_only_keys = long_only_keys self.weights_prior = obj_instance.weights_prior self.maximum_position_weights = obj_instance.maximum_position_weights diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index ba32e217fb..8e9df18b22 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -26,6 +26,7 @@ class constraintsForDynamicOpt: reduce_only_keys: list = arg_not_supplied no_trade_keys: list = arg_not_supplied + long_only_keys: list = arg_not_supplied class objectiveFunctionForGreedy: @@ -39,7 +40,6 @@ def __init__( previous_positions: portfolioWeights = arg_not_supplied, constraints: constraintsForDynamicOpt = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, - long_only: list = arg_not_supplied, log: pst_logger = get_logger("objectiveFunctionForGreedy"), ): diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index 56a7bb218f..151ca146d5 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -75,7 +75,7 @@ def get_data_and_calculate_for_code( if input_data.long_only is arg_not_supplied: long_only = False else: - long_only = instrument_code in input_data.long_only + long_only = instrument_code in input_data.long_only_keys max_position = input_data.maximum_position_weight_for_code(instrument_code) weight_prior = input_data.prior_weight_for_code(instrument_code) From af7ceee537f3dbf22c3286730cc35ddfc9672cdc Mon Sep 17 00:00:00 2001 From: rob Date: Mon, 6 Nov 2023 17:15:52 +0000 Subject: [PATCH 053/235] can use long only constraint in dynamic optimisation now --- systems/provided/dynamic_small_system_optimise/optimisation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index 8e9df18b22..2ac9e0ba8d 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -54,7 +54,6 @@ def __init__( self.weights_optimal = weights_optimal self.contracts_optimal = contracts_optimal - self.long_only = long_only if previous_positions is arg_not_supplied: weights_prior = arg_not_supplied From 562aeb61bf7b85a8d3c989c8212108547d82f464 Mon Sep 17 00:00:00 2001 From: rob Date: Mon, 6 Nov 2023 17:30:21 +0000 Subject: [PATCH 054/235] can use long only constraint in dynamic optimisation now --- .../dynamic_small_system_optimise/set_up_constraints.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index 151ca146d5..e0792ac32e 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -72,7 +72,7 @@ def get_data_and_calculate_for_code( no_trade = False else: no_trade = instrument_code in input_data.no_trade_keys - if input_data.long_only is arg_not_supplied: + if input_data.long_only_keys is arg_not_supplied: long_only = False else: long_only = instrument_code in input_data.long_only_keys From 9baf989792395f6faf73691ad3ea3b766a100e70 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 09:19:30 +0000 Subject: [PATCH 055/235] final piece of long only DO --- .../optimised_positions_stage.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index 812c06cc36..408c3b0657 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -137,15 +137,26 @@ def get_constraints(self) -> constraintsForDynamicOpt: ## 'reduce only' is as good as do not trade in backtesting ## but we use this rather than 'don't trade' for consistency with production reduce_only_keys = self.get_reduce_only_instruments() + long_only_keys = self.get_long_only_instruments() - return constraintsForDynamicOpt(reduce_only_keys=reduce_only_keys) + return constraintsForDynamicOpt( + reduce_only_keys=reduce_only_keys, long_only_keys=long_only_keys + ) - @diagnostic() + @input def get_reduce_only_instruments(self) -> list: reduce_only_keys = self.parent.get_list_of_markets_not_trading_but_with_data() return reduce_only_keys + @input + def get_long_only_instruments(self) -> list: + long_only_keys = ( + self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), + ) + + return long_only_keys + def get_speed_control(self): small_config = self.config.small_system trade_shadow_cost = small_config["shadow_cost"] From e11dc460d8f26138fa7d50547cdb4720fe24a322 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 09:22:32 +0000 Subject: [PATCH 056/235] final piece of long only DO --- .../dynamic_small_system_optimise/optimised_positions_stage.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index 408c3b0657..0b4d4a3c40 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -143,13 +143,11 @@ def get_constraints(self) -> constraintsForDynamicOpt: reduce_only_keys=reduce_only_keys, long_only_keys=long_only_keys ) - @input def get_reduce_only_instruments(self) -> list: reduce_only_keys = self.parent.get_list_of_markets_not_trading_but_with_data() return reduce_only_keys - @input def get_long_only_instruments(self) -> list: long_only_keys = ( self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), From 4e279d82294433bd1504e555a2efc889d1083ea0 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 10:33:16 +0000 Subject: [PATCH 057/235] final piece of long only DO --- .../optimised_positions_stage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index 0b4d4a3c40..c98000c3ee 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -149,9 +149,9 @@ def get_reduce_only_instruments(self) -> list: return reduce_only_keys def get_long_only_instruments(self) -> list: - long_only_keys = ( + long_only_keys =\ self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), - ) + return long_only_keys From 2b2f84a8fc1a6132b851ec12f7a58ded7876f822 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 7 Nov 2023 10:41:34 +0000 Subject: [PATCH 058/235] merge latest, black --- .../optimised_positions_stage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index c98000c3ee..0b4d4a3c40 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -149,9 +149,9 @@ def get_reduce_only_instruments(self) -> list: return reduce_only_keys def get_long_only_instruments(self) -> list: - long_only_keys =\ + long_only_keys = ( self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), - + ) return long_only_keys From 155ffc76eaba020ce92c3dc225defd6f92d9c046 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 10:54:43 +0000 Subject: [PATCH 059/235] final piece of long only DO --- .../optimised_positions_stage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index c98000c3ee..b25edabead 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -149,9 +149,9 @@ def get_reduce_only_instruments(self) -> list: return reduce_only_keys def get_long_only_instruments(self) -> list: - long_only_keys =\ + long_only_keys = ( self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), - + )[0] ## can't stop black wrapping in brackets producing a tuple return long_only_keys From ebd02ba8a80d488b36b0990874ce2282aa531fbf Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 7 Nov 2023 11:52:45 +0000 Subject: [PATCH 060/235] tell Black to ignore one line https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html --- .../optimised_positions_stage.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index b25edabead..d8a6b72e87 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -149,9 +149,7 @@ def get_reduce_only_instruments(self) -> list: return reduce_only_keys def get_long_only_instruments(self) -> list: - long_only_keys = ( - self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), - )[0] ## can't stop black wrapping in brackets producing a tuple + long_only_keys = self.config.get_element_or_default("long_only_instruments_DO_ONLY", []) # fmt: skip return long_only_keys From 0d13486ed2c07f33813c171293ab29834965bd44 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 14:38:37 +0000 Subject: [PATCH 061/235] fix to issue with reduce only and no trade in DO --- .../optimised_positions_stage.py | 4 +++- .../set_up_constraints.py | 14 +++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index b25edabead..e587c0077a 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -151,7 +151,9 @@ def get_reduce_only_instruments(self) -> list: def get_long_only_instruments(self) -> list: long_only_keys = ( self.config.get_element_or_default("long_only_instruments_DO_ONLY", []), - )[0] ## can't stop black wrapping in brackets producing a tuple + )[ + 0 + ] ## can't stop black wrapping in brackets producing a tuple return long_only_keys diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index e0792ac32e..f4af4b5402 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -6,7 +6,8 @@ from syscore.constants import arg_not_supplied from sysquant.optimisation.weights import portfolioWeights -A_VERY_LARGE_NUMBER = 999999999 +A_VERY_LARGE_NUMBER = 999 ## +A_VERY_SMALL_NUMBER = 0.000001 class minMaxAndDirectionAndStart(dict): @@ -72,6 +73,7 @@ def get_data_and_calculate_for_code( no_trade = False else: no_trade = instrument_code in input_data.no_trade_keys + if input_data.long_only_keys is arg_not_supplied: long_only = False else: @@ -135,24 +137,26 @@ def calculate_minima_and_maxima( maximum = A_VERY_LARGE_NUMBER if long_only: - minimum = 0.0 + minimum = -A_VERY_SMALL_NUMBER if no_trade: if weight_prior is not arg_not_supplied: return weight_prior, weight_prior + else: + return -A_VERY_SMALL_NUMBER, +A_VERY_SMALL_NUMBER if reduce_only: if weight_prior is not arg_not_supplied: if weight_prior > 0: - minimum = 0.0 + minimum = -A_VERY_SMALL_NUMBER maximum = weight_prior elif weight_prior < 0: minimum = max(minimum, weight_prior) - maximum = 0.0 + maximum = A_VERY_SMALL_NUMBER else: ## prior weight equals zero, so no trade - return (0.0, 0.0) + return (-A_VERY_SMALL_NUMBER, +A_VERY_SMALL_NUMBER) if max_position is not arg_not_supplied: max_position = abs(max_position) From 0768b6fff3b57c28528f1c002ff5ec3929fd6d76 Mon Sep 17 00:00:00 2001 From: rob Date: Tue, 7 Nov 2023 16:12:02 +0000 Subject: [PATCH 062/235] fix to issue with reduce only and no trade in DO --- .../set_up_constraints.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index f4af4b5402..761b51604d 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -137,26 +137,26 @@ def calculate_minima_and_maxima( maximum = A_VERY_LARGE_NUMBER if long_only: - minimum = -A_VERY_SMALL_NUMBER + minimum = 0.0 if no_trade: if weight_prior is not arg_not_supplied: return weight_prior, weight_prior else: - return -A_VERY_SMALL_NUMBER, +A_VERY_SMALL_NUMBER + return 0.0, 0.0 if reduce_only: if weight_prior is not arg_not_supplied: if weight_prior > 0: - minimum = -A_VERY_SMALL_NUMBER + minimum = 0.0 maximum = weight_prior elif weight_prior < 0: minimum = max(minimum, weight_prior) - maximum = A_VERY_SMALL_NUMBER + maximum = 0.0 else: ## prior weight equals zero, so no trade - return (-A_VERY_SMALL_NUMBER, +A_VERY_SMALL_NUMBER) + return 0.0, 0.0 if max_position is not arg_not_supplied: max_position = abs(max_position) @@ -173,10 +173,12 @@ def calculate_direction( minimum: float = -A_VERY_LARGE_NUMBER, maximum: float = A_VERY_LARGE_NUMBER, ) -> float: - if minimum >= 0: + + ## always start at zero, so if minima/maxima already bind we can only go up or down + if minimum >= 0.0: return 1 - if maximum <= 0: + if maximum <= 0.0: return -1 if np.isnan(optimum_weight): From 0053588f9ee8d67256d316aec920ea12672b0285 Mon Sep 17 00:00:00 2001 From: rob Date: Wed, 8 Nov 2023 08:49:25 +0000 Subject: [PATCH 063/235] can now pass constraint functions to DO --- .../optimisation.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index 2ac9e0ba8d..116ec39ae6 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -1,3 +1,4 @@ +from typing import Callable from dataclasses import dataclass import numpy as np @@ -41,6 +42,7 @@ def __init__( constraints: constraintsForDynamicOpt = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, log: pst_logger = get_logger("objectiveFunctionForGreedy"), + constraint_function: Callable = arg_not_supplied, ): self.covariance_matrix = covariance_matrix @@ -72,7 +74,7 @@ def __init__( self.maximum_position_weights = maximum_position_weights self.maximum_positions = maximum_positions - + self.constraint_function = constraint_function self.log = log def optimise_positions(self) -> portfolioWeights: @@ -218,8 +220,9 @@ def adjust_weights_for_size_of_tracking_error( def evaluate(self, weights: np.array) -> float: track_error = self.tracking_error_against_optimal(weights) trade_costs = self.calculate_costs(weights) + constraint_function_value = self.constraint_function_value(weights) - return track_error + trade_costs + return track_error + trade_costs + constraint_function_value def tracking_error_against_optimal(self, weights: np.array) -> float: track_error = self.tracking_error_against_passed_weights( @@ -266,6 +269,18 @@ def calculate_costs(self, weights: np.array) -> float: def trade_shadow_cost(self): return self.speed_control.trade_shadow_cost + def constraint_function_value(self, weights: np.array): + if self.constraint_function == arg_not_supplied: + return 0.0 + + portfolio_weights = portfolioWeights.from_weights_and_keys( + list_of_weights=weights, list_of_keys=self.keys_with_valid_data + ) + constraint_function = self.constraint_function + value = constraint_function(portfolio_weights) + + return value + @property def starting_weights_as_np(self) -> np.array: return self.input_data.starting_weights_as_np From 95ec105f83819d6dde63216323adb82fc938fb1c Mon Sep 17 00:00:00 2001 From: rob Date: Sat, 11 Nov 2023 12:02:30 +0000 Subject: [PATCH 064/235] die --- systems/provided/dynamic_small_system_optimise/optimisation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index 116ec39ae6..ee47405c84 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -270,6 +270,7 @@ def trade_shadow_cost(self): return self.speed_control.trade_shadow_cost def constraint_function_value(self, weights: np.array): + ## Function that will return a big number if constraints aren't satisfied if self.constraint_function == arg_not_supplied: return 0.0 From 391512c5d34c3f0f06191b73adb81af73f99090b Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Tue, 14 Nov 2023 07:31:00 -0700 Subject: [PATCH 065/235] Improve log messages when there is a problem emailing a report (cherry picked from commit ca311ccc0fb32b024f5fd20117d620b9260e10c2) --- syslogdiag/email_via_db_interface.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/syslogdiag/email_via_db_interface.py b/syslogdiag/email_via_db_interface.py index 3deb1886b1..e37cd039f4 100644 --- a/syslogdiag/email_via_db_interface.py +++ b/syslogdiag/email_via_db_interface.py @@ -50,7 +50,7 @@ def send_email_and_record_date_or_store_on_fail( except Exception as e: # problem sending emails will store instead data.log.debug( - "Problem %s sending email subject %s, but message is stored" + "Problem %s sending email subject %s, but message will be stored" % (str(e), subject) ) store_message(data, body, subject, email_is_report=email_is_report) @@ -141,7 +141,7 @@ def record_date_of_email_warning_send(data, subject): def store_message(data, body, subject, email_is_report=False): if email_is_report: - # can't store reports + data.log.debug("Message not stored: can't store reports") return None email_store_file = get_storage_filename(data) From 8efdadddc8fecd5f7686002984e89d572095abbd Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Tue, 14 Nov 2023 09:52:10 -0700 Subject: [PATCH 066/235] Fix stack handler bug (cherry picked from commit 4de5eba6be28415e60111688b45e9c1f3e3a3654) --- sysproduction/interactive_order_stack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/interactive_order_stack.py b/sysproduction/interactive_order_stack.py index 1ce51e7c3e..0172742d1e 100644 --- a/sysproduction/interactive_order_stack.py +++ b/sysproduction/interactive_order_stack.py @@ -587,7 +587,7 @@ def generate_generic_manual_fill(data): stack_handler = stackHandler() if type(order) is brokerOrder: ## pass up and change positions - stack_handler.apply_broker_order_fills_to_database(order) + stack_handler.apply_broker_order_fills_to_database(order_id, order) else: stack_handler.apply_contract_order_fill_to_database(order) From a93509e154ee000b4d354c44b538e3e7b0d3a631 Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Tue, 14 Nov 2023 12:10:38 -0700 Subject: [PATCH 067/235] Fix stack handler bug (cherry picked from commit 8488b99b9a144c7234ff6cc639b69997716fad5f) --- sysexecution/stack_handler/fills.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/sysexecution/stack_handler/fills.py b/sysexecution/stack_handler/fills.py index dccc0ff643..2c93ac6766 100644 --- a/sysexecution/stack_handler/fills.py +++ b/sysexecution/stack_handler/fills.py @@ -93,8 +93,15 @@ def apply_broker_order_fills_to_database( contract_order_id = broker_order.parent - # pass broker fills upwards - self.apply_broker_fills_to_contract_order(contract_order_id) + if contract_order_id is no_parent: + log = broker_order.log_with_attributes(self.log) + log.error( + "No parent for broker order %s %d" + % (str(broker_order), broker_order_id) + ) + else: + # pass broker fills upwards + self.apply_broker_fills_to_contract_order(contract_order_id) def pass_fills_from_broker_up_to_contract(self): list_of_contract_order_ids = self.contract_stack.get_list_of_order_ids() From ec18b75c5d7b9b6350ee75b5e44ebaf5856e9542 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 11:31:54 +0000 Subject: [PATCH 068/235] checked into wrong branch --- sysdata/parquet/__init__.py | 0 sysexecution/stack_handler/roll_orders.py | 15 ++++++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 sysdata/parquet/__init__.py diff --git a/sysdata/parquet/__init__.py b/sysdata/parquet/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysexecution/stack_handler/roll_orders.py b/sysexecution/stack_handler/roll_orders.py index 2289445c54..e5297c0fe0 100644 --- a/sysexecution/stack_handler/roll_orders.py +++ b/sysexecution/stack_handler/roll_orders.py @@ -471,12 +471,17 @@ def get_strategy_name_with_largest_position_for_instrument( diag_positions.get_all_current_strategy_instrument_positions() ) - return ( - all_instrument_positions.strategy_name_with_largest_abs_position_for_instrument( - instrument_code - ) - ) + try: + strategy_name = all_instrument_positions.strategy_name_with_largest_abs_position_for_instrument( + instrument_code + ) + except: + ## corner case where nets out to 0 + strategies = diag_positions.get_list_of_strategies_with_positions() + strategy_name = strategies[0] + data.log.debug("No strategies have net positions in %s, using arbitrary strategy %s" % (instrument_code, strategy_name)) + return strategy_name def create_contract_roll_orders( data: dataBlob, From a0273166801d3676df3f3ef75fc62d0d7a1d06e5 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 12:56:50 +0000 Subject: [PATCH 069/235] starting to build in parquet --- sysdata/data_blob.py | 29 ++++++++++- sysdata/parquet/parquet_access.py | 38 +++++++++++++++ sysdata/parquet/parquet_adjusted_prices.py | 57 ++++++++++++++++++++++ sysproduction/data/directories.py | 3 ++ 4 files changed, 125 insertions(+), 2 deletions(-) create mode 100644 sysdata/parquet/parquet_access.py create mode 100644 sysdata/parquet/parquet_adjusted_prices.py diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index 7c11be2943..88357381e6 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -8,7 +8,7 @@ from sysdata.mongodb.mongo_connection import mongoDb from syslogging.logger import * from sysdata.mongodb.mongo_IB_client_id import mongoIbBrokerClientIdData - +from sysdata.parquet.parquet_access import ParquetAccess class dataBlob(object): def __init__( @@ -16,6 +16,7 @@ def __init__( class_list: list = arg_not_supplied, log_name: str = "", csv_data_paths: dict = arg_not_supplied, + parquet_store_path: str = arg_not_supplied, ib_conn: connectionIB = arg_not_supplied, mongo_db: mongoDb = arg_not_supplied, log: pst_logger = arg_not_supplied, @@ -63,6 +64,7 @@ def __init__( self._log_name = log_name self._csv_data_paths = csv_data_paths self._keep_original_prefix = keep_original_prefix + self._parquet_store_path = parquet_store_path self._attr_list = [] @@ -101,6 +103,7 @@ def _get_class_adding_method(self, class_object): csv=self._add_csv_class, arctic=self._add_arctic_class, mongo=self._add_mongo_class, + parquet = self._add_parquet_class ) method_to_add_with = class_dict.get(prefix, None) @@ -166,6 +169,22 @@ def _add_arctic_class(self, class_object): return resolved_instance + def _add_parquet_class(self, class_object): + log = self._get_specific_logger(class_object) + try: + resolved_instance = class_object(parquet_access = self.parquet_access, log=log) + except Exception as e: + class_name = get_class_name(class_object) + msg = ( + "Error '%s' couldn't evaluate %s(parquet_access = self.parquet_access, log = self.log.setup(component = %s)) \ + This might be because import is missing\ + or arguments don't follow pattern or parquet_store is undefined" + % (str(e), class_name, class_name) + ) + self._raise_and_log_error(msg) + + return resolved_instance + def _add_csv_class(self, class_object): datapath = self._get_csv_paths_for_class(class_object) log = self._get_specific_logger(class_object) @@ -308,6 +327,12 @@ def mongo_db(self) -> mongoDb: return mongo_db + @property + def parquet_access(self) -> ParquetAccess: + if self._parquet_store_path is arg_not_supplied: + raise Exception("Need to define parquet_store in config to use parquet") + return ParquetAccess(self._parquet_store_path) + def _get_new_mongo_db(self) -> mongoDb: mongo_db = mongoDb() @@ -340,7 +365,7 @@ def log_name(self) -> str: return log_name -source_dict = dict(arctic="db", mongo="db", csv="db", ib="broker") +source_dict = dict(arctic="db", mongo="db", csv="db", parquet="db",ib="broker") def identifying_name(split_up_name: list, keep_original_prefix=False) -> str: diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py new file mode 100644 index 0000000000..99d2310068 --- /dev/null +++ b/sysdata/parquet/parquet_access.py @@ -0,0 +1,38 @@ +import os +import pandas as pd +from syscore.fileutils import files_with_extension_in_pathname, resolve_path_and_filename_for_package, get_resolved_pathname, delete_file_if_too_old + +EXTENSION = "parquet" +class ParquetAccess(object): + def __init__(self, parquet_store_path: str): + self.parquet_store = get_resolved_pathname(parquet_store_path) + + def get_all_identifiers_with_data_type(self, data_type: str): + path= self._get_pathname_given_data_type(data_type) + return files_with_extension_in_pathname(path) + + def does_idenitifier_with_data_type_exist(self, data_type: str, identifier: str) -> bool: + filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + return os.path.isfile(filename) + + def delete_data_given_data_type_and_identifier(self, data_type: str, identifier: str): + filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + os.remove(filename) + + def write_data_given_data_type_and_identifier(self, data_to_write: pd.DataFrame, data_type: str, identifier: str): + filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + data_to_write.to_parquet(filename) + + def read_data_given_data_type_and_identifier(self, data_type: str, identifier: str) -> pd.DataFrame: + filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + return pd.read_parquet(filename) + + def _get_filename_given_data_type_and_identifier(self, data_type: str, identifier: str): + path = self._get_pathname_given_data_type(data_type) + return resolve_path_and_filename_for_package(path, seperate_filename="%s.%s" % (identifier, EXTENSION)) + + def _get_pathname_given_data_type(self, data_type: str): + root = self.parquet_store + return os.path.join(root, data_type) + + diff --git a/sysdata/parquet/parquet_adjusted_prices.py b/sysdata/parquet/parquet_adjusted_prices.py new file mode 100644 index 0000000000..b0bca94090 --- /dev/null +++ b/sysdata/parquet/parquet_adjusted_prices.py @@ -0,0 +1,57 @@ +from sysdata.parquet.parquet_access import ParquetAccess +from sysdata.futures.adjusted_prices import ( + futuresAdjustedPricesData, +) +from sysobjects.adjusted_prices import futuresAdjustedPrices + +from syslogging.logger import * +import pandas as pd + +ADJPRICE_COLLECTION = "futures_adjusted_prices" + + +class parquetFuturesAdjustedPricesData(futuresAdjustedPricesData): + """ + Class to read / write multiple futures price data to and from arctic + """ + + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFuturesAdjustedPrices")): + + super().__init__(log=log) + self._parquet = parquet_access + + @property + def parquet(self) -> ParquetAccess: + return self._parquet + + def get_list_of_instruments(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=ADJPRICE_COLLECTION) + + def _get_adjusted_prices_without_checking( + self, instrument_code: str + ) -> futuresAdjustedPrices: + return self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + + def _delete_adjusted_prices_without_any_warning_be_careful( + self, instrument_code: str + ): + self.parquet.delete_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + self.log.debug( + "Deleted adjusted prices for %s from %s" % (instrument_code, str(self)), + instrument_code=instrument_code, + ) + + def _add_adjusted_prices_without_checking_for_existing_entry( + self, instrument_code: str, adjusted_price_data: futuresAdjustedPrices + ): + adjusted_price_data_aspd = pd.DataFrame(adjusted_price_data) + adjusted_price_data_aspd.columns = ["price"] + adjusted_price_data_aspd = adjusted_price_data_aspd.astype(float) + + self.parquet.write_data_given_data_type_and_identifier(data_to_write=adjusted_price_data_aspd, data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + + self.log.debug( + "Wrote %s lines of prices for %s to %s" + % (len(adjusted_price_data), instrument_code, str(self)), + instrument_code=instrument_code, + ) diff --git a/sysproduction/data/directories.py b/sysproduction/data/directories.py index a6b019bb8e..5df6954d75 100644 --- a/sysproduction/data/directories.py +++ b/sysproduction/data/directories.py @@ -6,6 +6,9 @@ production_config = get_production_config() +def get_parquet_root_directory(): + path = production_config.get_element("parquet_store") + return get_resolved_pathname(path) def get_main_backup_directory(): ans = production_config.get_element("offsystem_backup_directory") From 4333a854198f9d7b4330746f18b973a17cd83ac2 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:01:46 +0000 Subject: [PATCH 070/235] starting to build in parquet --- sysdata/parquet/parquet_access.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index 99d2310068..e08cc7ab3b 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -1,6 +1,7 @@ import os import pandas as pd from syscore.fileutils import files_with_extension_in_pathname, resolve_path_and_filename_for_package, get_resolved_pathname, delete_file_if_too_old +from pathlib import Path EXTENSION = "parquet" class ParquetAccess(object): @@ -27,6 +28,10 @@ def read_data_given_data_type_and_identifier(self, data_type: str, identifier: s filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) return pd.read_parquet(filename) + def _confirm_or_create_path(self, data_type:str): + path = Path(self._get_pathname_given_data_type(data_type)) + path.mkdir(parents=True, exist_ok=True) + def _get_filename_given_data_type_and_identifier(self, data_type: str, identifier: str): path = self._get_pathname_given_data_type(data_type) return resolve_path_and_filename_for_package(path, seperate_filename="%s.%s" % (identifier, EXTENSION)) @@ -35,4 +40,3 @@ def _get_pathname_given_data_type(self, data_type: str): root = self.parquet_store return os.path.join(root, data_type) - From 12755412faf32202120c69199a5d6d2079f402af Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:11:43 +0000 Subject: [PATCH 071/235] parquet create files --- sysdata/parquet/parquet_access.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index e08cc7ab3b..596b10fc4a 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -28,9 +28,6 @@ def read_data_given_data_type_and_identifier(self, data_type: str, identifier: s filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) return pd.read_parquet(filename) - def _confirm_or_create_path(self, data_type:str): - path = Path(self._get_pathname_given_data_type(data_type)) - path.mkdir(parents=True, exist_ok=True) def _get_filename_given_data_type_and_identifier(self, data_type: str, identifier: str): path = self._get_pathname_given_data_type(data_type) @@ -38,5 +35,8 @@ def _get_filename_given_data_type_and_identifier(self, data_type: str, identifie def _get_pathname_given_data_type(self, data_type: str): root = self.parquet_store - return os.path.join(root, data_type) + path = os.path.join(root, data_type) + path.mkdir(parents=True, exist_ok=True) + + return path From 119ebc72662803405627eb3747dc0e1399aa673e Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:14:43 +0000 Subject: [PATCH 072/235] parquet create files --- sysdata/parquet/parquet_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index 596b10fc4a..43d0542473 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -36,7 +36,7 @@ def _get_filename_given_data_type_and_identifier(self, data_type: str, identifie def _get_pathname_given_data_type(self, data_type: str): root = self.parquet_store path = os.path.join(root, data_type) - path.mkdir(parents=True, exist_ok=True) + Path(path).mkdir(parents=True, exist_ok=True) return path From 345ddf82e7c96ea187473fd01ed1249d4e903387 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:20:21 +0000 Subject: [PATCH 073/235] parquet create files --- sysdata/parquet/parquet_adjusted_prices.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_adjusted_prices.py b/sysdata/parquet/parquet_adjusted_prices.py index b0bca94090..56e447ac98 100644 --- a/sysdata/parquet/parquet_adjusted_prices.py +++ b/sysdata/parquet/parquet_adjusted_prices.py @@ -20,6 +20,9 @@ def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFutures super().__init__(log=log) self._parquet = parquet_access + def __repr__(self): + return "parquetFuturesAdjustedPrices" + @property def parquet(self) -> ParquetAccess: return self._parquet @@ -30,7 +33,7 @@ def get_list_of_instruments(self) -> list: def _get_adjusted_prices_without_checking( self, instrument_code: str ) -> futuresAdjustedPrices: - return self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + return futuresAdjustedPrices(self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code)) def _delete_adjusted_prices_without_any_warning_be_careful( self, instrument_code: str From 609c22f60614f692d22ded50f0a41788364f1816 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:24:13 +0000 Subject: [PATCH 074/235] missing extension --- requirements.txt | 1 + setup.py | 1 + sysdata/parquet/parquet_access.py | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7d19621e07..6d381481d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,4 @@ Flask>=2.0.1 Werkzeug>=2.0.1 statsmodels==0.13.0 PyPDF2>=2.5.0 +pyarrow>=14.0.1 \ No newline at end of file diff --git a/setup.py b/setup.py index e6ae509a4c..454f4b819a 100755 --- a/setup.py +++ b/setup.py @@ -105,6 +105,7 @@ def dir_this_file(): "Werkzeug>=2.0.1", "statsmodels==0.12.2", "PyPDF2>=2.5.0", + "pyarrow>=14.0.1" ], tests_require=["nose", "flake8"], extras_require=dict(), diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index 43d0542473..23405ad6fc 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -10,7 +10,7 @@ def __init__(self, parquet_store_path: str): def get_all_identifiers_with_data_type(self, data_type: str): path= self._get_pathname_given_data_type(data_type) - return files_with_extension_in_pathname(path) + return files_with_extension_in_pathname(path, extension=EXTENSION) def does_idenitifier_with_data_type_exist(self, data_type: str, identifier: str) -> bool: filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) From 64f0f5323c605b82d1eefb156ff34512152400b2 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:28:34 +0000 Subject: [PATCH 075/235] missing extension --- sysdata/parquet/parquet_adjusted_prices.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_adjusted_prices.py b/sysdata/parquet/parquet_adjusted_prices.py index 56e447ac98..56b5a51e6f 100644 --- a/sysdata/parquet/parquet_adjusted_prices.py +++ b/sysdata/parquet/parquet_adjusted_prices.py @@ -33,7 +33,8 @@ def get_list_of_instruments(self) -> list: def _get_adjusted_prices_without_checking( self, instrument_code: str ) -> futuresAdjustedPrices: - return futuresAdjustedPrices(self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code)) + raw_price_df = self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + return futuresAdjustedPrices(raw_price_df.squeeze()) def _delete_adjusted_prices_without_any_warning_be_careful( self, instrument_code: str From 7cfec21a3b523375c27f1bf705f76442d24cc1ff Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 13:40:57 +0000 Subject: [PATCH 076/235] added backup function --- sysproduction/backup_arctic_to_parquet.py | 471 ++++++++++++++++++++++ 1 file changed, 471 insertions(+) create mode 100644 sysproduction/backup_arctic_to_parquet.py diff --git a/sysproduction/backup_arctic_to_parquet.py b/sysproduction/backup_arctic_to_parquet.py new file mode 100644 index 0000000000..cdfd0a8ead --- /dev/null +++ b/sysproduction/backup_arctic_to_parquet.py @@ -0,0 +1,471 @@ +import os +import pandas as pd + +from syscore.exceptions import missingData +from syscore.pandas.pdutils import check_df_equals, check_ts_equals +from syscore.dateutils import CALENDAR_DAYS_IN_YEAR +from sysdata.data_blob import dataBlob + +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData + +from sysdata.csv.csv_futures_contracts import csvFuturesContractData +from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData +from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData +from sysdata.csv.csv_spot_fx import csvFxPricesData +from sysdata.csv.csv_contract_position_data import csvContractPositionData +from sysdata.csv.csv_strategy_position_data import csvStrategyPositionData +from sysdata.csv.csv_historic_orders import ( + csvStrategyHistoricOrdersData, + csvContractHistoricOrdersData, + csvBrokerHistoricOrdersData, +) +from sysdata.csv.csv_capital_data import csvCapitalData +from sysdata.csv.csv_optimal_position import csvOptimalPositionData +from sysdata.csv.csv_spread_costs import csvSpreadCostData +from sysdata.csv.csv_roll_state_storage import csvRollStateData +from sysdata.csv.csv_spreads import csvSpreadsForInstrumentData + +from sysdata.arctic.arctic_futures_per_contract_prices import ( + arcticFuturesContractPriceData, +) +from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData +from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData +from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData +from sysdata.arctic.arctic_capital import arcticCapitalData +from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData +from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData +from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData + +from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData +from sysdata.mongodb.mongo_historic_orders import ( + mongoBrokerHistoricOrdersData, + mongoContractHistoricOrdersData, + mongoStrategyHistoricOrdersData, +) +from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData +from sysdata.mongodb.mongo_roll_state_storage import mongoRollStateData + +from sysobjects.contracts import futuresContract +from sysobjects.production.tradeable_object import instrumentStrategy + +from sysproduction.data.directories import get_csv_backup_directory, get_csv_dump_dir,get_parquet_root_directory +from sysproduction.data.strategies import get_list_of_strategies + + +def backup_arctic_to_parquet(): + + backup_data = get_data_blob("backup_arctic_to_parquet") + log = backup_data.log + + log.debug("Dumping from arctic, mongo to parquet files") + #backup_futures_contract_prices_to_csv(backup_data) + #backup_spreads_to_csv(backup_data) + #backup_fx_to_csv(backup_data) + #backup_multiple_to_csv(backup_data) + backup_adj_to_parquet(backup_data) + #backup_strategy_position_data(backup_data) + #backup_contract_position_data(backup_data) + #backup_historical_orders(backup_data) + #backup_capital(backup_data) + #backup_contract_data(backup_data) + #backup_spread_cost_data(backup_data) + #backup_optimal_positions(backup_data) + #backup_roll_state_data(backup_data) + + +def get_data_blob(logname): + + data = dataBlob( + keep_original_prefix=True, log_name=logname, parquet_store_path=get_parquet_root_directory() + ) + + data.add_class_list( + [ + #csvBrokerHistoricOrdersData, + #csvCapitalData, + #csvContractHistoricOrdersData, + #csvContractPositionData, + parquetFuturesAdjustedPricesData, + #csvFuturesContractData, + #csvFuturesContractPriceData, + #csvFuturesMultiplePricesData, + #csvFxPricesData, + #csvOptimalPositionData, + #csvRollStateData, + #csvSpreadCostData, + #csvSpreadsForInstrumentData, + #csvStrategyHistoricOrdersData, + #csvStrategyPositionData, + ] + ) + + data.add_class_list( + [ + arcticCapitalData, + arcticFuturesAdjustedPricesData, + arcticFuturesContractPriceData, + arcticFuturesMultiplePricesData, + arcticFxPricesData, + arcticSpreadsForInstrumentData, + mongoBrokerHistoricOrdersData, + mongoContractHistoricOrdersData, + arcticContractPositionData, + mongoFuturesContractData, + arcticOptimalPositionData, + mongoRollStateData, + mongoSpreadCostData, + mongoStrategyHistoricOrdersData, + arcticStrategyPositionData, + ] + ) + + return data + + +# Write function for each thing we want to backup +# Think about how to check for duplicates (data frame equals?) +def backup_adj_to_parquet(data): + instrument_list = data.arctic_futures_adjusted_prices.get_list_of_instruments() + for instrument_code in instrument_list: + backup_adj_to_parquet_for_instrument(data, instrument_code) + +def backup_adj_to_parquet_for_instrument(data: dataBlob, instrument_code: str): + arctic_data = data.arctic_futures_adjusted_prices.get_adjusted_prices( + instrument_code + ) + try: + data.parquet_futures_adjusted_prices.add_adjusted_prices( + instrument_code, arctic_data, ignore_duplication=True + ) + px = data.parquet_futures_adjusted_prices.get_adjusted_prices( + instrument_code + ) + data.log.debug( + "Written .parquet backup for adjusted prices %s, %s" % (instrument_code,str(px)) + ) + except BaseException: + data.log.warning( + "Problem writing .parquet backup for adjusted prices %s" % instrument_code + ) + + +# Futures contract data +def backup_futures_contract_prices_to_csv(data, ignore_long_expired: bool = True): + instrument_list = ( + data.arctic_futures_contract_price.get_list_of_instrument_codes_with_merged_price_data() + ) + for instrument_code in instrument_list: + backup_futures_contract_prices_for_instrument_to_csv( + data=data, + instrument_code=instrument_code, + ignore_long_expired=ignore_long_expired, + ) + + +def backup_futures_contract_prices_for_instrument_to_csv( + data: dataBlob, instrument_code: str, ignore_long_expired: bool = True +): + list_of_contracts = data.arctic_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( + instrument_code + ) + + for futures_contract in list_of_contracts: + backup_futures_contract_prices_for_contract_to_csv( + data=data, + futures_contract=futures_contract, + ignore_long_expired=ignore_long_expired, + ) + + +def backup_futures_contract_prices_for_contract_to_csv( + data: dataBlob, futures_contract: futuresContract, ignore_long_expired: bool = True +): + if ignore_long_expired: + if futures_contract.days_since_expiry() > CALENDAR_DAYS_IN_YEAR: + ## Almost certainly expired, skip + data.log.debug("Skipping expired contract %s" % str(futures_contract)) + + return None + + arctic_data = ( + data.arctic_futures_contract_price.get_merged_prices_for_contract_object( + futures_contract + ) + ) + + csv_data = data.csv_futures_contract_price.get_merged_prices_for_contract_object( + futures_contract + ) + + if check_df_equals(arctic_data, csv_data): + # No update needed, move on + data.log.debug("No prices backup needed for %s" % str(futures_contract)) + else: + # Write backup + try: + data.csv_futures_contract_price.write_merged_prices_for_contract_object( + futures_contract, + arctic_data, + ignore_duplication=True, + ) + data.log.debug( + "Written backup .csv of prices for %s" % str(futures_contract) + ) + except BaseException: + data.log.warning( + "Problem writing .csv of prices for %s" % str(futures_contract) + ) + + +# fx +def backup_fx_to_csv(data): + fx_codes = data.arctic_fx_prices.get_list_of_fxcodes() + for fx_code in fx_codes: + arctic_data = data.arctic_fx_prices.get_fx_prices(fx_code) + csv_data = data.csv_fx_prices.get_fx_prices(fx_code) + if check_ts_equals(arctic_data, csv_data): + data.log.debug("No fx backup needed for %s" % fx_code) + else: + # Write backup + try: + data.csv_fx_prices.add_fx_prices( + fx_code, arctic_data, ignore_duplication=True + ) + data.log.debug("Written .csv backup for %s" % fx_code) + except BaseException: + data.log.warning("Problem writing .csv backup for %s" % fx_code) + + +def backup_multiple_to_csv(data): + instrument_list = data.arctic_futures_multiple_prices.get_list_of_instruments() + for instrument_code in instrument_list: + backup_multiple_to_csv_for_instrument(data, instrument_code) + + +def backup_multiple_to_csv_for_instrument(data, instrument_code: str): + arctic_data = data.arctic_futures_multiple_prices.get_multiple_prices( + instrument_code + ) + csv_data = data.csv_futures_multiple_prices.get_multiple_prices(instrument_code) + + if check_df_equals(arctic_data, csv_data): + data.log.debug("No multiple prices backup needed for %s" % instrument_code) + pass + else: + try: + data.csv_futures_multiple_prices.add_multiple_prices( + instrument_code, arctic_data, ignore_duplication=True + ) + data.log.debug( + "Written .csv backup multiple prices for %s" % instrument_code + ) + except BaseException: + data.log.warning( + "Problem writing .csv backup multiple prices for %s" % instrument_code + ) + + + + + + +def backup_spreads_to_csv(data: dataBlob): + instrument_list = data.arctic_spreads_for_instrument.get_list_of_instruments() + for instrument_code in instrument_list: + backup_spreads_to_csv_for_instrument(data, instrument_code) + + +def backup_spreads_to_csv_for_instrument(data: dataBlob, instrument_code: str): + arctic_data = data.arctic_spreads_for_instrument.get_spreads(instrument_code) + csv_data = data.csv_spreads_for_instrument.get_spreads(instrument_code) + + if check_ts_equals(arctic_data, csv_data): + data.log.debug("No spreads backup needed for %s" % instrument_code) + pass + else: + try: + data.csv_spreads_for_instrument.add_spreads( + instrument_code, arctic_data, ignore_duplication=True + ) + data.log.debug("Written .csv backup for spreads %s" % instrument_code) + except BaseException: + data.log.warning( + "Problem writing .csv backup for spreads %s" % instrument_code + ) + + +def backup_contract_position_data(data): + instrument_list = ( + data.arctic_contract_position.get_list_of_instruments_with_any_position() + ) + for instrument_code in instrument_list: + contract_list = ( + data.arctic_contract_position.get_list_of_contracts_for_instrument_code( + instrument_code + ) + ) + for contract in contract_list: + try: + arctic_data = data.arctic_contract_position.get_position_as_series_for_contract_object( + contract + ) + except missingData: + print("No data to write to .csv") + else: + data.csv_contract_position.overwrite_position_series_for_contract_object_without_checking( + contract, arctic_data + ) + data.log.debug( + "Backed up %s %s contract position data" % (instrument_code, contract) + ) + + +def backup_strategy_position_data(data): + strategy_list = get_list_of_strategies(data) + instrument_list = ( + data.arctic_contract_position.get_list_of_instruments_with_any_position() + ) + for strategy_name in strategy_list: + for instrument_code in instrument_list: + instrument_strategy = instrumentStrategy( + strategy_name=strategy_name, instrument_code=instrument_code + ) + try: + arctic_data = data.arctic_strategy_position.get_position_as_series_for_instrument_strategy_object( + instrument_strategy + ) + except missingData: + continue + data.csv_strategy_position.overwrite_position_series_for_instrument_strategy_without_checking( + instrument_strategy, arctic_data + ) + data.log.debug( + "Backed up %s %s strategy position data" + % (instrument_code, strategy_name) + ) + + +def backup_historical_orders(data): + data.log.debug("Backing up strategy orders...") + list_of_orders = [ + data.mongo_strategy_historic_orders.get_order_with_orderid(id) + for id in data.mongo_strategy_historic_orders.get_list_of_order_ids() + ] + data.csv_strategy_historic_orders.write_orders(list_of_orders) + data.log.debug("Done") + + data.log.debug("Backing up contract orders...") + list_of_orders = [ + data.mongo_contract_historic_orders.get_order_with_orderid(order_id) + for order_id in data.mongo_contract_historic_orders.get_list_of_order_ids() + ] + data.csv_contract_historic_orders.write_orders(list_of_orders) + data.log.debug("Done") + + data.log.debug("Backing up broker orders...") + list_of_orders = [ + data.mongo_broker_historic_orders.get_order_with_orderid(order_id) + for order_id in data.mongo_broker_historic_orders.get_list_of_order_ids() + ] + data.csv_broker_historic_orders.write_orders(list_of_orders) + data.log.debug("Done") + + +def backup_capital(data): + strategy_capital_dict = get_dict_of_strategy_capital(data) + capital_data_df = add_total_capital_to_strategy_capital_dict_return_df( + data, strategy_capital_dict + ) + capital_data_df = capital_data_df.ffill() + + data.csv_capital.write_backup_df_of_all_capital(capital_data_df) + + +def get_dict_of_strategy_capital(data: dataBlob) -> dict: + strategy_list = get_list_of_strategies(data) + strategy_capital_data = dict() + for strategy_name in strategy_list: + strategy_capital_data[ + strategy_name + ] = data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) + + return strategy_capital_data + + +def add_total_capital_to_strategy_capital_dict_return_df( + data: dataBlob, capital_data: dict +) -> pd.DataFrame: + + strategy_capital_as_df = pd.concat(capital_data, axis=1) + total_capital = data.arctic_capital.get_df_of_all_global_capital() + capital_data = pd.concat([strategy_capital_as_df, total_capital], axis=1) + + capital_data = capital_data.ffill() + + return capital_data + + +def backup_optimal_positions(data): + + strategy_instrument_list = ( + data.arctic_optimal_position.get_list_of_instrument_strategies_with_optimal_position() + ) + + for instrument_strategy in strategy_instrument_list: + try: + arctic_data = data.arctic_optimal_position.get_optimal_position_as_df_for_instrument_strategy( + instrument_strategy + ) + except missingData: + continue + data.csv_optimal_position.write_optimal_position_as_df_for_instrument_strategy_without_checking( + instrument_strategy, arctic_data + ) + data.log.debug("Backed up %s optimal position data" % str(instrument_strategy)) + + +def backup_spread_cost_data(data): + spread_cost_as_series = data.mongo_spread_cost.get_spread_costs_as_series() + data.csv_spread_cost.write_all_instrument_spreads(spread_cost_as_series) + data.log.debug("Backed up spread cost data") + + +def backup_roll_state_data(data): + instrument_list = data.mongo_roll_state.get_list_of_instruments() + roll_state_list = [] + for instrument_code in instrument_list: + roll_state = data.mongo_roll_state.get_name_of_roll_state(instrument_code) + roll_state_list.append(roll_state) + + roll_state_df = pd.DataFrame(roll_state_list, index=instrument_list) + roll_state_df.columns = ["state"] + data.csv_roll_state.write_all_instrument_data(roll_state_df) + data.log.debug("Backed up roll state") + + +def backup_contract_data(data): + instrument_list = ( + data.mongo_futures_contract.get_list_of_all_instruments_with_contracts() + ) + for instrument_code in instrument_list: + contract_list = ( + data.mongo_futures_contract.get_all_contract_objects_for_instrument_code( + instrument_code + ) + ) + data.csv_futures_contract.write_contract_list_as_df( + instrument_code, contract_list + ) + data.log.debug("Backed up contract data for %s" % instrument_code) + + +def backup_csv_dump(data): + source_path = get_csv_dump_dir() + destination_path = get_csv_backup_directory() + data.log.debug("Copy from %s to %s" % (source_path, destination_path)) + os.system("rsync -av %s %s" % (source_path, destination_path)) + + +if __name__ == "__main__": + backup_arctic_to_parquet() From d5ae9cbc54417fdfb963e0e2587f7a350342ecd3 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:00:19 +0000 Subject: [PATCH 077/235] added capital --- sysdata/parquet/parquet_capital.py | 48 +++++++++++++++++++ .../transfer}/backup_arctic_to_parquet.py | 26 ++++------ 2 files changed, 56 insertions(+), 18 deletions(-) create mode 100644 sysdata/parquet/parquet_capital.py rename {sysproduction => sysinit/transfer}/backup_arctic_to_parquet.py (96%) diff --git a/sysdata/parquet/parquet_capital.py b/sysdata/parquet/parquet_capital.py new file mode 100644 index 0000000000..40ee5eef8c --- /dev/null +++ b/sysdata/parquet/parquet_capital.py @@ -0,0 +1,48 @@ +from syscore.exceptions import missingData +from sysdata.production.capital import capitalData + +CAPITAL_COLLECTION = "capital" + +from sysdata.parquet.parquet_access import ParquetAccess +from syslogging.logger import * +import pandas as pd + + +class parquetCapitalData(capitalData): + """ + Class to read / write multiple total capital data to and from arctic + """ + + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetCapitalData")): + + super().__init__(log=log) + self._parquet = parquet_access + + def __repr__(self): + return "parquetCapitalData" + + @property + def parquet(self)-> ParquetAccess: + return self._parquet + + def _get_list_of_strategies_with_capital_including_total(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=CAPITAL_COLLECTION) + + def get_capital_pd_df_for_strategy(self, strategy_name: str) -> pd.DataFrame: + try: + pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=CAPITAL_COLLECTION, identifier=strategy_name) + except: + raise missingData( + "Unable to get capital data from parquet for strategy %s" % strategy_name + ) + + return pd_df + + def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): + + self.parquet.delete_data_given_data_type_and_identifier(data_type=CAPITAL_COLLECTION, identifier=strategy_name) + + def update_capital_pd_df_for_strategy( + self, strategy_name: str, updated_capital_df: pd.DataFrame + ): + self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_capital_df, identifier=strategy_name, data_type=CAPITAL_COLLECTION) diff --git a/sysproduction/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py similarity index 96% rename from sysproduction/backup_arctic_to_parquet.py rename to sysinit/transfer/backup_arctic_to_parquet.py index cdfd0a8ead..52ec66fe93 100644 --- a/sysproduction/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -7,6 +7,7 @@ from sysdata.data_blob import dataBlob from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.parquet.parquet_capital import parquetCapitalData from sysdata.csv.csv_futures_contracts import csvFuturesContractData from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData @@ -19,7 +20,6 @@ csvContractHistoricOrdersData, csvBrokerHistoricOrdersData, ) -from sysdata.csv.csv_capital_data import csvCapitalData from sysdata.csv.csv_optimal_position import csvOptimalPositionData from sysdata.csv.csv_spread_costs import csvSpreadCostData from sysdata.csv.csv_roll_state_storage import csvRollStateData @@ -67,7 +67,7 @@ def backup_arctic_to_parquet(): #backup_strategy_position_data(backup_data) #backup_contract_position_data(backup_data) #backup_historical_orders(backup_data) - #backup_capital(backup_data) + backup_capital(backup_data) #backup_contract_data(backup_data) #backup_spread_cost_data(backup_data) #backup_optimal_positions(backup_data) @@ -83,7 +83,7 @@ def get_data_blob(logname): data.add_class_list( [ #csvBrokerHistoricOrdersData, - #csvCapitalData, + parquetCapitalData, #csvContractHistoricOrdersData, #csvContractPositionData, parquetFuturesAdjustedPricesData, @@ -373,22 +373,12 @@ def backup_historical_orders(data): def backup_capital(data): - strategy_capital_dict = get_dict_of_strategy_capital(data) - capital_data_df = add_total_capital_to_strategy_capital_dict_return_df( - data, strategy_capital_dict - ) - capital_data_df = capital_data_df.ffill() - - data.csv_capital.write_backup_df_of_all_capital(capital_data_df) - - -def get_dict_of_strategy_capital(data: dataBlob) -> dict: - strategy_list = get_list_of_strategies(data) - strategy_capital_data = dict() + strategy_list = data.arctic_capital._get_list_of_strategies_with_capital_including_total() for strategy_name in strategy_list: - strategy_capital_data[ - strategy_name - ] = data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) + strategy_capital_data=data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) + data.parquet_capital.update_capital_pd_df_for_strategy(strategy_name=strategy_name, updated_capital_df=strategy_capital_data) + written_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) + print("Wrote capital data for strategy %s, %s" % (strategy_name, str(written_data))) return strategy_capital_data From 29a73ac93689fa89e126dc0104caf0e59ff4b8ca Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:04:48 +0000 Subject: [PATCH 078/235] deal with ts in parquet --- sysdata/parquet/parquet_access.py | 2 +- sysinit/transfer/backup_arctic_to_parquet.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index 23405ad6fc..d69576a5ae 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -22,7 +22,7 @@ def delete_data_given_data_type_and_identifier(self, data_type: str, identifier: def write_data_given_data_type_and_identifier(self, data_to_write: pd.DataFrame, data_type: str, identifier: str): filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) - data_to_write.to_parquet(filename) + data_to_write.to_parquet(filename, coerce_timestamps='us',allow_truncated_timestamps=True) def read_data_given_data_type_and_identifier(self, data_type: str, identifier: str) -> pd.DataFrame: filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 52ec66fe93..b39d6efae4 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -378,7 +378,7 @@ def backup_capital(data): strategy_capital_data=data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) data.parquet_capital.update_capital_pd_df_for_strategy(strategy_name=strategy_name, updated_capital_df=strategy_capital_data) written_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) - print("Wrote capital data for strategy %s, %s" % (strategy_name, str(written_data))) + print("Wrote capital data for strategy %s, was %s now %s" % (strategy_name, str(strategy_capital_data), str(written_data))) return strategy_capital_data From 22f24331dae0176d0bdd74a50b68f6c4116ea068 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:28:18 +0000 Subject: [PATCH 079/235] replace arctic adjusted prices with parquet - read the discussion! --- sysdata/sim/db_futures_sim_data.py | 4 ++-- .../adjustedprices_from_mongo_multiple_to_mongo.py | 10 +++++----- sysinit/futures/clone_data_for_instrument.py | 4 ++-- .../multiple_and_adjusted_from_csv_to_arctic.py | 7 +++---- sysinit/futures/repocsv_adjusted_prices.py | 6 +++--- sysproduction/backup_arctic_to_csv.py | 8 ++++---- sysproduction/data/prices.py | 8 ++++---- sysproduction/data/sim_data.py | 4 ++-- 8 files changed, 25 insertions(+), 26 deletions(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 90a360c39f..88d341b13a 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -5,7 +5,7 @@ from syscore.constants import arg_not_supplied -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData @@ -26,7 +26,7 @@ def __init__( data = dataBlob( log=log, class_list=[ - arcticFuturesAdjustedPricesData, + parquetFuturesAdjustedPricesData, arcticFuturesMultiplePricesData, arcticFxPricesData, csvFuturesInstrumentData, diff --git a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py index 73e8b58c52..81ebcdf51c 100755 --- a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py +++ b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py @@ -6,7 +6,7 @@ """ from syscore.constants import arg_not_supplied from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysobjects.adjusted_prices import futuresAdjustedPrices @@ -14,10 +14,10 @@ def _get_data_inputs(csv_adj_data_path): arctic_multiple_prices = arcticFuturesMultiplePricesData() - arctic_adjusted_prices = arcticFuturesAdjustedPricesData() + parquet_adjusted_prices = parquetFuturesAdjustedPricesData() csv_adjusted_prices = csvFuturesAdjustedPricesData(csv_adj_data_path) - return arctic_multiple_prices, arctic_adjusted_prices, csv_adjusted_prices + return arctic_multiple_prices, parquet_adjusted_prices, csv_adjusted_prices def process_adjusted_prices_all_instruments( @@ -44,7 +44,7 @@ def process_adjusted_prices_single_instrument( ): ( arctic_multiple_prices, - arctic_adjusted_prices, + parquet_adjusted_prices, csv_adjusted_prices, ) = _get_data_inputs(csv_adj_data_path) if multiple_prices is arg_not_supplied: @@ -56,7 +56,7 @@ def process_adjusted_prices_single_instrument( print(adjusted_prices) if ADD_TO_ARCTIC: - arctic_adjusted_prices.add_adjusted_prices( + parquet_adjusted_prices.add_adjusted_prices( instrument_code, adjusted_prices, ignore_duplication=True ) if ADD_TO_CSV: diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 54f4e13d0c..8e98d39593 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -2,7 +2,7 @@ arcticFuturesContractPriceData, ) from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData @@ -16,7 +16,7 @@ db_data_individual_prices = arcticFuturesContractPriceData() db_data_multiple_prices = arcticFuturesMultiplePricesData() -db_data_adjusted_prices = arcticFuturesAdjustedPricesData() +db_data_adjusted_prices = parquetFuturesAdjustedPricesData() csv_roll_calendar = csvRollCalendarData() csv_multiple = csvFuturesMultiplePricesData() diff --git a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py index 453c99fef2..4f91931faf 100644 --- a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py +++ b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py @@ -2,8 +2,7 @@ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData - +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData def init_arctic_with_csv_futures_contract_prices( multiple_price_datapath=arg_not_supplied, adj_price_datapath=arg_not_supplied @@ -39,10 +38,10 @@ def init_arctic_with_csv_prices_for_code( ) csv_adj_data = csvFuturesAdjustedPricesData(adj_price_datapath) - arctic_adj_data = arcticFuturesAdjustedPricesData() + parquet_adj_data = parquetFuturesAdjustedPricesData() adj_prices = csv_adj_data.get_adjusted_prices(instrument_code) - arctic_adj_data.add_adjusted_prices( + parquet_adj_data.add_adjusted_prices( instrument_code, adj_prices, ignore_duplication=True ) diff --git a/sysinit/futures/repocsv_adjusted_prices.py b/sysinit/futures/repocsv_adjusted_prices.py index 49e32d2363..2977980240 100755 --- a/sysinit/futures/repocsv_adjusted_prices.py +++ b/sysinit/futures/repocsv_adjusted_prices.py @@ -2,12 +2,12 @@ Copy from csv repo files to arctic for adjusted prices """ from syscore.constants import arg_not_supplied -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort") - arctic_adjusted_prices = arcticFuturesAdjustedPricesData() + parquet_adjusted_prices = parquetFuturesAdjustedPricesData() ## MODIFY PATH TO USE SOMETHING OTHER THAN DEFAULT csv_adj_datapath = arg_not_supplied @@ -26,6 +26,6 @@ print(adjusted_prices) - arctic_adjusted_prices.add_adjusted_prices( + parquet_adjusted_prices.add_adjusted_prices( instrument_code, adjusted_prices, ignore_duplication=True ) diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 104aa998e6..d375957110 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -28,7 +28,7 @@ arcticFuturesContractPriceData, ) from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData from sysdata.arctic.arctic_capital import arcticCapitalData @@ -148,7 +148,7 @@ def get_data_and_create_csv_directories(logname): data.add_class_list( [ arcticCapitalData, - arcticFuturesAdjustedPricesData, + parquetFuturesAdjustedPricesData, arcticFuturesContractPriceData, arcticFuturesMultiplePricesData, arcticFxPricesData, @@ -289,13 +289,13 @@ def backup_multiple_to_csv_for_instrument(data, instrument_code: str): def backup_adj_to_csv(data): - instrument_list = data.arctic_futures_adjusted_prices.get_list_of_instruments() + instrument_list = data.parquet_futures_adjusted_prices.get_list_of_instruments() for instrument_code in instrument_list: backup_adj_to_csv_for_instrument(data, instrument_code) def backup_adj_to_csv_for_instrument(data: dataBlob, instrument_code: str): - arctic_data = data.arctic_futures_adjusted_prices.get_adjusted_prices( + arctic_data = data.parquet_futures_adjusted_prices.get_adjusted_prices( instrument_code ) csv_data = data.csv_futures_adjusted_prices.get_adjusted_prices(instrument_code) diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index 2e1d6638cc..9cf7005b14 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -21,8 +21,8 @@ arcticFuturesMultiplePricesData, futuresMultiplePrices, ) -from sysdata.arctic.arctic_adjusted_prices import ( - arcticFuturesAdjustedPricesData, +from sysdata.parquet.parquet_adjusted_prices import ( + parquetFuturesAdjustedPricesData, futuresAdjustedPrices, ) from sysdata.arctic.arctic_spreads import ( @@ -56,7 +56,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ arcticFuturesContractPriceData, - arcticFuturesAdjustedPricesData, + parquetFuturesAdjustedPricesData, arcticFuturesMultiplePricesData, mongoFuturesContractData, arcticSpreadsForInstrumentData, @@ -253,7 +253,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: arcticFuturesContractPriceData, arcticFuturesMultiplePricesData, mongoFuturesContractData, - arcticFuturesAdjustedPricesData, + parquetFuturesAdjustedPricesData, arcticSpreadsForInstrumentData, ] ) diff --git a/sysproduction/data/sim_data.py b/sysproduction/data/sim_data.py index 1653d74b4d..bcbd921a1c 100644 --- a/sysproduction/data/sim_data.py +++ b/sysproduction/data/sim_data.py @@ -2,7 +2,7 @@ from sysdata.sim.db_futures_sim_data import dbFuturesSimData from sysdata.data_blob import dataBlob -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData @@ -17,7 +17,7 @@ def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimDat data.add_class_list( [ - arcticFuturesAdjustedPricesData, + parquetFuturesAdjustedPricesData, arcticFuturesMultiplePricesData, arcticFxPricesData, mongoSpreadCostData, From 568c13e0378a7f786caf3fd3de2eb12a8f825ba0 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:34:30 +0000 Subject: [PATCH 080/235] single point of xfer --- sysdata/pointers.py | 4 ++++ sysdata/sim/db_futures_sim_data.py | 2 +- .../futures/adjustedprices_from_mongo_multiple_to_mongo.py | 2 +- sysinit/futures/clone_data_for_instrument.py | 3 +-- .../futures/multiple_and_adjusted_from_csv_to_arctic.py | 2 +- sysinit/futures/repocsv_adjusted_prices.py | 2 +- sysproduction/backup_arctic_to_csv.py | 2 +- sysproduction/data/prices.py | 7 +++---- 8 files changed, 13 insertions(+), 11 deletions(-) create mode 100644 sysdata/pointers.py diff --git a/sysdata/pointers.py b/sysdata/pointers.py new file mode 100644 index 0000000000..1446b38153 --- /dev/null +++ b/sysdata/pointers.py @@ -0,0 +1,4 @@ +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData + +parquetFuturesAdjustedPricesData = parquetFuturesAdjustedPricesData ## change to arctic if desired diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 88d341b13a..891636e506 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -5,7 +5,7 @@ from syscore.constants import arg_not_supplied -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData diff --git a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py index 81ebcdf51c..55935e031d 100755 --- a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py +++ b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py @@ -6,7 +6,7 @@ """ from syscore.constants import arg_not_supplied from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysobjects.adjusted_prices import futuresAdjustedPrices diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 8e98d39593..15bc6cef34 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -2,8 +2,7 @@ arcticFuturesContractPriceData, ) from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData - +from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData diff --git a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py index 4f91931faf..2d5725a109 100644 --- a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py +++ b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py @@ -2,7 +2,7 @@ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetFuturesAdjustedPricesData def init_arctic_with_csv_futures_contract_prices( multiple_price_datapath=arg_not_supplied, adj_price_datapath=arg_not_supplied diff --git a/sysinit/futures/repocsv_adjusted_prices.py b/sysinit/futures/repocsv_adjusted_prices.py index 2977980240..9a0abf2974 100755 --- a/sysinit/futures/repocsv_adjusted_prices.py +++ b/sysinit/futures/repocsv_adjusted_prices.py @@ -2,7 +2,7 @@ Copy from csv repo files to arctic for adjusted prices """ from syscore.constants import arg_not_supplied -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData if __name__ == "__main__": diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index d375957110..95b7a763ae 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -28,7 +28,7 @@ arcticFuturesContractPriceData, ) from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData from sysdata.arctic.arctic_capital import arcticCapitalData diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index 9cf7005b14..de9df8d294 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -21,10 +21,9 @@ arcticFuturesMultiplePricesData, futuresMultiplePrices, ) -from sysdata.parquet.parquet_adjusted_prices import ( - parquetFuturesAdjustedPricesData, - futuresAdjustedPrices, -) +from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysobjects.adjusted_prices import futuresAdjustedPrices + from sysdata.arctic.arctic_spreads import ( arcticSpreadsForInstrumentData, spreadsForInstrumentData, From 88f0bb5c5cca8787502d81b336c34be6f2809bfb Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:43:35 +0000 Subject: [PATCH 081/235] single point of xfer --- sysdata/data_blob.py | 20 +++++++++++++++++--- sysinit/transfer/backup_arctic_to_parquet.py | 2 +- sysproduction/data/directories.py | 3 --- 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index 88357381e6..485f915e75 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -3,6 +3,7 @@ from sysbrokers.IB.ib_connection import connectionIB from syscore.objects import get_class_name from syscore.constants import arg_not_supplied +from syscore.fileutils import get_resolved_pathname from syscore.text import camel_case_split from sysdata.config.production_config import get_production_config, Config from sysdata.mongodb.mongo_connection import mongoDb @@ -329,9 +330,18 @@ def mongo_db(self) -> mongoDb: @property def parquet_access(self) -> ParquetAccess: - if self._parquet_store_path is arg_not_supplied: - raise Exception("Need to define parquet_store in config to use parquet") - return ParquetAccess(self._parquet_store_path) + return ParquetAccess(self.parquet_root_directory) + + @property + def parquet_root_directory(self) -> str: + path = self._parquet_store_path + if path is arg_not_supplied: + try: + path = get_parquet_root_directory(self.config) + except: + raise Exception("Need to define parquet_store in config to use parquet") + + return path def _get_new_mongo_db(self) -> mongoDb: mongo_db = mongoDb() @@ -367,6 +377,10 @@ def log_name(self) -> str: source_dict = dict(arctic="db", mongo="db", csv="db", parquet="db",ib="broker") +def get_parquet_root_directory(config): + path = config.get_element("parquet_store") + return get_resolved_pathname(path) + def identifying_name(split_up_name: list, keep_original_prefix=False) -> str: """ diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index b39d6efae4..5adc226dc0 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -49,7 +49,7 @@ from sysobjects.contracts import futuresContract from sysobjects.production.tradeable_object import instrumentStrategy -from sysproduction.data.directories import get_csv_backup_directory, get_csv_dump_dir,get_parquet_root_directory +from sysproduction.data.directories import get_csv_backup_directory, get_csv_dump_dir from sysproduction.data.strategies import get_list_of_strategies diff --git a/sysproduction/data/directories.py b/sysproduction/data/directories.py index 5df6954d75..a6b019bb8e 100644 --- a/sysproduction/data/directories.py +++ b/sysproduction/data/directories.py @@ -6,9 +6,6 @@ production_config = get_production_config() -def get_parquet_root_directory(): - path = production_config.get_element("parquet_store") - return get_resolved_pathname(path) def get_main_backup_directory(): ans = production_config.get_element("offsystem_backup_directory") From 97d898472dff0b46919e596e7eb6cf6301493996 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:46:25 +0000 Subject: [PATCH 082/235] reorder backup order; --- sysproduction/backup_arctic_to_csv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 95b7a763ae..3ed234a02f 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -76,11 +76,11 @@ def backup_arctic_to_csv(self): log = self.data.log log.debug("Dumping from arctic, mongo to .csv files") + backup_adj_to_csv(backup_data) backup_futures_contract_prices_to_csv(backup_data) backup_spreads_to_csv(backup_data) backup_fx_to_csv(backup_data) backup_multiple_to_csv(backup_data) - backup_adj_to_csv(backup_data) backup_strategy_position_data(backup_data) backup_contract_position_data(backup_data) backup_historical_orders(backup_data) From 2258e31ddce8bcb6c27e8fdbd023049b90e7eb1c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:54:11 +0000 Subject: [PATCH 083/235] use parquet rather than arctic capital --- sysdata/pointers.py | 9 +++++++-- sysproduction/backup_arctic_to_csv.py | 7 ++++--- sysproduction/data/capital.py | 4 ++-- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/sysdata/pointers.py b/sysdata/pointers.py index 1446b38153..f85b80d9b3 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -1,4 +1,9 @@ -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData as og_parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData -parquetFuturesAdjustedPricesData = parquetFuturesAdjustedPricesData ## change to arctic if desired +from sysdata.parquet.parquet_capital import parquetCapitalData as og_parquetCapitalData +from sysdata.arctic.arctic_capital import arcticCapitalData + +## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class +parquetFuturesAdjustedPricesData = og_parquetFuturesAdjustedPricesData ## change to arctic if desired +parquetCapitalData = og_parquetCapitalData ## change to arctic \ No newline at end of file diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 3ed234a02f..06f7232d07 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -29,9 +29,10 @@ ) from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquetCapitalData + from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData -from sysdata.arctic.arctic_capital import arcticCapitalData from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData @@ -147,7 +148,7 @@ def get_data_and_create_csv_directories(logname): data.add_class_list( [ - arcticCapitalData, + parquetCapitalData, parquetFuturesAdjustedPricesData, arcticFuturesContractPriceData, arcticFuturesMultiplePricesData, @@ -435,7 +436,7 @@ def get_dict_of_strategy_capital(data: dataBlob) -> dict: for strategy_name in strategy_list: strategy_capital_data[ strategy_name - ] = data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) + ] = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) return strategy_capital_data diff --git a/sysproduction/data/capital.py b/sysproduction/data/capital.py index d622fbb5d9..7209797205 100644 --- a/sysproduction/data/capital.py +++ b/sysproduction/data/capital.py @@ -7,7 +7,7 @@ from sysdata.production.capital import capitalData, totalCapitalCalculationData from sysdata.production.margin import marginData, seriesOfMargin -from sysdata.arctic.arctic_capital import arcticCapitalData +from sysdata.pointers import parquetCapitalData from sysdata.mongodb.mongo_margin import mongoMarginData from sysdata.data_blob import dataBlob @@ -18,7 +18,7 @@ class dataCapital(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(arcticCapitalData) + data.add_class_object(parquetCapitalData) return data From 6650a915fa00f5fa3abdc036e82837b6f2a7757a Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 14:56:07 +0000 Subject: [PATCH 084/235] revert to arctic capital --- sysdata/pointers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/pointers.py b/sysdata/pointers.py index f85b80d9b3..29930e266c 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -6,4 +6,4 @@ ## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class parquetFuturesAdjustedPricesData = og_parquetFuturesAdjustedPricesData ## change to arctic if desired -parquetCapitalData = og_parquetCapitalData ## change to arctic \ No newline at end of file +parquetCapitalData = arcticCapitalData \ No newline at end of file From 220782e56cc0d5027591da5433be6dd7cc6ea16f Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 15:01:41 +0000 Subject: [PATCH 085/235] add explicit column name to capital df --- sysdata/production/capital.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sysdata/production/capital.py b/sysdata/production/capital.py index 2cca790b67..2c7dd71d7d 100644 --- a/sysdata/production/capital.py +++ b/sysdata/production/capital.py @@ -176,6 +176,7 @@ def update_capital_value_for_strategy( new_capital_item = pd.Series([new_capital_value], [date]) updated_capital_series = pd.concat([capital_series, new_capital_item], axis=0) updated_capital_df = updated_capital_series.to_frame() + updated_capital_df.columns = ['capital'] self.update_capital_pd_df_for_strategy(strategy_name, updated_capital_df) From 00b6cb6818bc1c4d365bf384ea30b8466b6b8c4d Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 15:02:32 +0000 Subject: [PATCH 086/235] revert to parquet capital --- sysdata/parquet/parquet_capital.py | 1 + sysdata/pointers.py | 2 +- sysdata/production/capital.py | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysdata/parquet/parquet_capital.py b/sysdata/parquet/parquet_capital.py index 40ee5eef8c..0f9b6af0ed 100644 --- a/sysdata/parquet/parquet_capital.py +++ b/sysdata/parquet/parquet_capital.py @@ -45,4 +45,5 @@ def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): def update_capital_pd_df_for_strategy( self, strategy_name: str, updated_capital_df: pd.DataFrame ): + updated_capital_df.columns = ['capital'] self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_capital_df, identifier=strategy_name, data_type=CAPITAL_COLLECTION) diff --git a/sysdata/pointers.py b/sysdata/pointers.py index 29930e266c..fc626c624b 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -6,4 +6,4 @@ ## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class parquetFuturesAdjustedPricesData = og_parquetFuturesAdjustedPricesData ## change to arctic if desired -parquetCapitalData = arcticCapitalData \ No newline at end of file +parquetCapitalData = og_parquetCapitalData \ No newline at end of file diff --git a/sysdata/production/capital.py b/sysdata/production/capital.py index 2c7dd71d7d..2cca790b67 100644 --- a/sysdata/production/capital.py +++ b/sysdata/production/capital.py @@ -176,7 +176,6 @@ def update_capital_value_for_strategy( new_capital_item = pd.Series([new_capital_value], [date]) updated_capital_series = pd.concat([capital_series, new_capital_item], axis=0) updated_capital_df = updated_capital_series.to_frame() - updated_capital_df.columns = ['capital'] self.update_capital_pd_df_for_strategy(strategy_name, updated_capital_df) From 5f1453630e560df37823d78c68c0dec07923fecc Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 15:05:20 +0000 Subject: [PATCH 087/235] revert to parquet capital --- sysdata/parquet/parquet_capital.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_capital.py b/sysdata/parquet/parquet_capital.py index 0f9b6af0ed..1e9a794c93 100644 --- a/sysdata/parquet/parquet_capital.py +++ b/sysdata/parquet/parquet_capital.py @@ -45,5 +45,8 @@ def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): def update_capital_pd_df_for_strategy( self, strategy_name: str, updated_capital_df: pd.DataFrame ): - updated_capital_df.columns = ['capital'] + if len(updated_capital_df.columns)==1: + ## single strategy, need columns labelling + updated_capital_df.columns = ['capital'] + self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_capital_df, identifier=strategy_name, data_type=CAPITAL_COLLECTION) From 2e58217f44d88862ce06f0ca3fbab6ea4d343967 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 15:30:05 +0000 Subject: [PATCH 088/235] contract prices in backup --- .../parquet_futures_per_contract_prices.py | 220 ++++++++++++++++++ .../parquet_historic_contract_positions.py | 0 sysdata/parquet/parquet_multiple_prices.py | 0 sysdata/parquet/parquet_optimal_positions.py | 0 sysdata/parquet/parquet_spotfx_prices.py | 0 sysdata/parquet/parquet_spreads.py | 0 sysinit/transfer/backup_arctic_to_parquet.py | 83 ++++--- 7 files changed, 266 insertions(+), 37 deletions(-) create mode 100644 sysdata/parquet/parquet_futures_per_contract_prices.py create mode 100644 sysdata/parquet/parquet_historic_contract_positions.py create mode 100644 sysdata/parquet/parquet_multiple_prices.py create mode 100644 sysdata/parquet/parquet_optimal_positions.py create mode 100644 sysdata/parquet/parquet_spotfx_prices.py create mode 100644 sysdata/parquet/parquet_spreads.py diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py new file mode 100644 index 0000000000..22c8dfcd53 --- /dev/null +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -0,0 +1,220 @@ +from typing import Tuple +from syscore.dateutils import Frequency, MIXED_FREQ + +from sysdata.parquet.parquet_access import ParquetAccess +from sysdata.futures.futures_per_contract_prices import ( + futuresContractPriceData, + listOfFuturesContracts, +) +from sysobjects.futures_per_contract_prices import futuresContractPrices +from sysobjects.contracts import futuresContract +from syslogging.logger import * + + +import pandas as pd + +CONTRACT_COLLECTION = "futures_contract_prices" + + +class parquetFuturesContractPriceData(futuresContractPriceData): + """ + Class to read / write futures price data to and from arctic + """ + + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFuturesContractPriceData")): + + super().__init__(log=log) + self._parquet = parquet_access + + def __repr__(self): + return "parquetFuturesContractPriceData" + + @property + def parquet(self) -> ParquetAccess: + return self._parquet + + def _get_merged_prices_for_contract_object_no_checking( + self, futures_contract_object: futuresContract + ) -> futuresContractPrices: + + # Returns a data frame which should have the right format + data = self._get_prices_at_frequency_for_contract_object_no_checking( + futures_contract_object, frequency=MIXED_FREQ + ) + + return data + + def _get_prices_at_frequency_for_contract_object_no_checking( + self, futures_contract_object: futuresContract, frequency: Frequency + ) -> futuresContractPrices: + + ident = from_contract_and_freq_to_key( + futures_contract_object, frequency=frequency + ) + + # Returns a data frame which should have the right format + data = self.parquet.read_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident) + + return futuresContractPrices(data) + + def _write_merged_prices_for_contract_object_no_checking( + self, + futures_contract_object: futuresContract, + futures_price_data: futuresContractPrices, + ): + """ + Write prices + CHECK prices are overriden on second write + + :param futures_contract_object: futuresContract + :param futures_price_data: futuresContractPriceData + :return: None + """ + + self._write_prices_at_frequency_for_contract_object_no_checking( + futures_contract_object=futures_contract_object, + frequency=MIXED_FREQ, + futures_price_data=futures_price_data, + ) + + def _write_prices_at_frequency_for_contract_object_no_checking( + self, + futures_contract_object: futuresContract, + futures_price_data: futuresContractPrices, + frequency: Frequency, + ): + + log = futures_contract_object.log(self.log) + ident = from_contract_and_freq_to_key( + futures_contract_object, frequency=frequency + ) + futures_price_data_as_pd = pd.DataFrame(futures_price_data) + + self.parquet.write_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident, data_to_write=futures_price_data_as_pd) + + log.debug( + "Wrote %s lines of prices for %s at %s to %s" + % ( + len(futures_price_data), + str(futures_contract_object.key), + str(frequency), + str(self), + ) + ) + + def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: + """ + + :return: list of contracts + """ + + list_of_contracts = self.get_contracts_with_price_data_for_frequency( + frequency=MIXED_FREQ + ) + + return list_of_contracts + + def get_contracts_with_price_data_for_frequency( + self, frequency: Frequency + ) -> listOfFuturesContracts: + + list_of_contract_and_freq_tuples = ( + self._get_contract_and_frequencies_with_price_data() + ) + list_of_contracts = [ + freq_and_contract_tuple[1] + for freq_and_contract_tuple in list_of_contract_and_freq_tuples + if freq_and_contract_tuple[0] == frequency + ] + + list_of_contracts = listOfFuturesContracts(list_of_contracts) + + return list_of_contracts + + def has_merged_price_data_for_contract( + self, contract_object: futuresContract + ) -> bool: + return self.has_price_data_for_contract_at_frequency( + contract_object, frequency=MIXED_FREQ + ) + + def has_price_data_for_contract_at_frequency( + self, contract_object: futuresContract, frequency: Frequency + ) -> bool: + ident =from_contract_and_freq_to_key(contract_object, frequency=frequency) + return self.parquet.does_idenitifier_with_data_type_exist(data_type=CONTRACT_COLLECTION, identifier=ident) + + def _get_contract_and_frequencies_with_price_data(self) -> list: + """ + + :return: list of futures contracts as tuples + """ + + all_keynames = self._all_keynames_in_library() + list_of_contract_and_freq_tuples = [ + from_key_to_freq_and_contract(keyname) for keyname in all_keynames + ] + + return list_of_contract_and_freq_tuples + + def _all_keynames_in_library(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=CONTRACT_COLLECTION) + + def _delete_merged_prices_for_contract_object_with_no_checks_be_careful( + self, futures_contract_object: futuresContract + ): + """ + Delete prices for a given contract object without performing any checks + + WILL THIS WORK IF DOESN'T EXIST? + :param futures_contract_object: + :return: None + """ + + self._delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( + futures_contract_object, frequency=MIXED_FREQ + ) + + def _delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( + self, futures_contract_object: futuresContract, frequency: Frequency + ): + log = futures_contract_object.log(self.log) + + ident = from_contract_and_freq_to_key( + contract=futures_contract_object, frequency=frequency + ) + self.parquet.delete_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident) + log.debug( + "Deleted all prices for %s from %s" + % (futures_contract_object.key, str(self)) + ) + + +def from_key_to_freq_and_contract(keyname) -> Tuple[Frequency, futuresContract]: + first_split = keyname.split("/") + if len(first_split) == 1: + frequency = MIXED_FREQ + contract_str = keyname + else: + frequency = Frequency[first_split[0]] + contract_str = first_split[1] + + contract_str_split = contract_str.split(".") + futures_contract = futuresContract(contract_str_split[0], contract_str_split[1]) + + return frequency, futures_contract + + +def from_contract_and_freq_to_key(contract: futuresContract, frequency: Frequency) -> str: + if frequency is MIXED_FREQ: + frequency_str = "" + else: + frequency_str = frequency.name + "/" + + return from_tuple_to_key( + [frequency_str, contract.instrument_code, contract.date_str] + ) + + +def from_tuple_to_key(keytuple) -> str: + return keytuple[0] + keytuple[1] + "." + keytuple[2] diff --git a/sysdata/parquet/parquet_historic_contract_positions.py b/sysdata/parquet/parquet_historic_contract_positions.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysdata/parquet/parquet_multiple_prices.py b/sysdata/parquet/parquet_multiple_prices.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysdata/parquet/parquet_optimal_positions.py b/sysdata/parquet/parquet_optimal_positions.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysdata/parquet/parquet_spotfx_prices.py b/sysdata/parquet/parquet_spotfx_prices.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysdata/parquet/parquet_spreads.py b/sysdata/parquet/parquet_spreads.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 5adc226dc0..ff2e878ed7 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -4,13 +4,15 @@ from syscore.exceptions import missingData from syscore.pandas.pdutils import check_df_equals, check_ts_equals from syscore.dateutils import CALENDAR_DAYS_IN_YEAR +from syscore.dateutils import DAILY_PRICE_FREQ, HOURLY_FREQ + from sysdata.data_blob import dataBlob from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_capital import parquetCapitalData +from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData from sysdata.csv.csv_futures_contracts import csvFuturesContractData -from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_spot_fx import csvFxPricesData from sysdata.csv.csv_contract_position_data import csvContractPositionData @@ -59,7 +61,7 @@ def backup_arctic_to_parquet(): log = backup_data.log log.debug("Dumping from arctic, mongo to parquet files") - #backup_futures_contract_prices_to_csv(backup_data) + backup_futures_contract_prices_to_parquet(backup_data) #backup_spreads_to_csv(backup_data) #backup_fx_to_csv(backup_data) #backup_multiple_to_csv(backup_data) @@ -77,7 +79,7 @@ def backup_arctic_to_parquet(): def get_data_blob(logname): data = dataBlob( - keep_original_prefix=True, log_name=logname, parquet_store_path=get_parquet_root_directory() + keep_original_prefix=True, log_name=logname ) data.add_class_list( @@ -88,7 +90,7 @@ def get_data_blob(logname): #csvContractPositionData, parquetFuturesAdjustedPricesData, #csvFuturesContractData, - #csvFuturesContractPriceData, + parquetFuturesContractPriceData, #csvFuturesMultiplePricesData, #csvFxPricesData, #csvOptimalPositionData, @@ -151,42 +153,34 @@ def backup_adj_to_parquet_for_instrument(data: dataBlob, instrument_code: str): # Futures contract data -def backup_futures_contract_prices_to_csv(data, ignore_long_expired: bool = True): +def backup_futures_contract_prices_to_parquet(data): instrument_list = ( data.arctic_futures_contract_price.get_list_of_instrument_codes_with_merged_price_data() ) for instrument_code in instrument_list: - backup_futures_contract_prices_for_instrument_to_csv( + backup_futures_contract_prices_for_instrument_to_parquet( data=data, - instrument_code=instrument_code, - ignore_long_expired=ignore_long_expired, + instrument_code=instrument_code ) -def backup_futures_contract_prices_for_instrument_to_csv( - data: dataBlob, instrument_code: str, ignore_long_expired: bool = True +def backup_futures_contract_prices_for_instrument_to_parquet( + data: dataBlob, instrument_code: str ): list_of_contracts = data.arctic_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( instrument_code ) for futures_contract in list_of_contracts: - backup_futures_contract_prices_for_contract_to_csv( + backup_futures_contract_prices_for_contract_to_parquet( data=data, futures_contract=futures_contract, - ignore_long_expired=ignore_long_expired, ) -def backup_futures_contract_prices_for_contract_to_csv( - data: dataBlob, futures_contract: futuresContract, ignore_long_expired: bool = True +def backup_futures_contract_prices_for_contract_to_parquet( + data: dataBlob, futures_contract: futuresContract ): - if ignore_long_expired: - if futures_contract.days_since_expiry() > CALENDAR_DAYS_IN_YEAR: - ## Almost certainly expired, skip - data.log.debug("Skipping expired contract %s" % str(futures_contract)) - - return None arctic_data = ( data.arctic_futures_contract_price.get_merged_prices_for_contract_object( @@ -194,28 +188,43 @@ def backup_futures_contract_prices_for_contract_to_csv( ) ) - csv_data = data.csv_futures_contract_price.get_merged_prices_for_contract_object( - futures_contract + data.parquet_futures_contract_price.write_merged_prices_for_contract_object( + futures_contract, + arctic_data, + ignore_duplication=True, + ) + parquet_data = ( + data.parquet_futures_contract_price.get_merged_prices_for_contract_object( + futures_contract + ) + ) + data.log.debug( + "Written backup .csv of prices for %s was %s now %s" % (str(futures_contract), arctic_data, parquet_data) ) - if check_df_equals(arctic_data, csv_data): - # No update needed, move on - data.log.debug("No prices backup needed for %s" % str(futures_contract)) - else: - # Write backup - try: - data.csv_futures_contract_price.write_merged_prices_for_contract_object( + for frequency in [DAILY_PRICE_FREQ, HOURLY_FREQ]: + arctic_data = ( + data.arctic_futures_contract_price.get_prices_at_frequency_for_contract_object( futures_contract, - arctic_data, - ignore_duplication=True, - ) - data.log.debug( - "Written backup .csv of prices for %s" % str(futures_contract) + frequency=frequency ) - except BaseException: - data.log.warning( - "Problem writing .csv of prices for %s" % str(futures_contract) + ) + + data.parquet_futures_contract_price.write_prices_at_frequency_for_contract_object( + futures_contract_object=futures_contract, + futures_price_data=arctic_data, + frequency=frequency, + ignore_duplication=True + ) + parquet_data = ( + data.parquet_futures_contract_price.get_prices_at_frequency_for_contract_object( + futures_contract, + frequency=frequency ) + ) + data.log.debug( + "Written backup .csv of prices at frequency %s for %s was %s now %s" % (str(frequency), str(futures_contract), arctic_data, parquet_data) + ) # fx From cacff7532569c2edba9f32dcff8d096b46ffd233 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 15:35:38 +0000 Subject: [PATCH 089/235] change ident repr for contracts and freq --- sysdata/parquet/parquet_futures_per_contract_prices.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py index 22c8dfcd53..0ed4115e42 100644 --- a/sysdata/parquet/parquet_futures_per_contract_prices.py +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -191,7 +191,7 @@ def _delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( def from_key_to_freq_and_contract(keyname) -> Tuple[Frequency, futuresContract]: - first_split = keyname.split("/") + first_split = keyname.split("@") if len(first_split) == 1: frequency = MIXED_FREQ contract_str = keyname @@ -199,7 +199,7 @@ def from_key_to_freq_and_contract(keyname) -> Tuple[Frequency, futuresContract]: frequency = Frequency[first_split[0]] contract_str = first_split[1] - contract_str_split = contract_str.split(".") + contract_str_split = contract_str.split("#") futures_contract = futuresContract(contract_str_split[0], contract_str_split[1]) return frequency, futures_contract @@ -209,7 +209,7 @@ def from_contract_and_freq_to_key(contract: futuresContract, frequency: Frequenc if frequency is MIXED_FREQ: frequency_str = "" else: - frequency_str = frequency.name + "/" + frequency_str = frequency.name + "@" return from_tuple_to_key( [frequency_str, contract.instrument_code, contract.date_str] @@ -217,4 +217,4 @@ def from_contract_and_freq_to_key(contract: futuresContract, frequency: Frequenc def from_tuple_to_key(keytuple) -> str: - return keytuple[0] + keytuple[1] + "." + keytuple[2] + return keytuple[0] + keytuple[1] + "#" + keytuple[2] From bef9dbee7e846748e0dfd4a86d55432a6bebb068 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 17 Nov 2023 16:58:19 +0000 Subject: [PATCH 090/235] per contract prices --- data/tools/contract_comparison.py | 7 +++---- .../arctic_futures_per_contract_prices.py | 2 +- sysdata/pointers.py | 17 ++++++++++++++++- ...ustedprices_from_mongo_multiple_to_mongo.py | 4 ++-- sysinit/futures/clone_data_for_instrument.py | 10 ++++------ .../contract_prices_from_csv_to_arctic.py | 6 ++---- sysinit/futures/create_hourly_and_daily.py | 11 ++++------- ...multiple_and_adjusted_from_csv_to_arctic.py | 4 ++-- ...arcticprices_and_csv_calendars_to_arctic.py | 11 +++-------- sysinit/futures/repocsv_adjusted_prices.py | 4 ++-- .../rollcalendars_from_arcticprices_to_csv.py | 8 +++----- sysproduction/backup_arctic_to_csv.py | 18 ++++++++---------- sysproduction/data/contracts.py | 6 ++---- sysproduction/data/prices.py | 10 ++++------ sysproduction/data/volumes.py | 6 ++---- 15 files changed, 58 insertions(+), 66 deletions(-) diff --git a/data/tools/contract_comparison.py b/data/tools/contract_comparison.py index 62c9ab2189..de66a39d47 100644 --- a/data/tools/contract_comparison.py +++ b/data/tools/contract_comparison.py @@ -1,6 +1,5 @@ -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) + +from sysdata.pointers import parquet_futures_contract_price_data from sysobjects.contracts import futuresContract import pandas as pd @@ -26,7 +25,7 @@ def _create_comparison( instrument_object=instrument_code, contract_date_object=forward_date_str ) - contract_prices = arcticFuturesContractPriceData() + contract_prices = parquet_futures_contract_price_data price_prices = contract_prices.get_merged_prices_for_contract_object( price_contract ) diff --git a/sysdata/arctic/arctic_futures_per_contract_prices.py b/sysdata/arctic/arctic_futures_per_contract_prices.py index ccdf538323..9348797257 100644 --- a/sysdata/arctic/arctic_futures_per_contract_prices.py +++ b/sysdata/arctic/arctic_futures_per_contract_prices.py @@ -6,9 +6,9 @@ from syscore.dateutils import Frequency, MIXED_FREQ from sysdata.arctic.arctic_connection import arcticData +from sysobjects.contracts import listOfFuturesContracts from sysdata.futures.futures_per_contract_prices import ( futuresContractPriceData, - listOfFuturesContracts, ) from sysobjects.futures_per_contract_prices import futuresContractPrices from sysobjects.contracts import futuresContract, get_code_and_id_from_contract_key diff --git a/sysdata/pointers.py b/sysdata/pointers.py index fc626c624b..5a20d08149 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -3,7 +3,22 @@ from sysdata.parquet.parquet_capital import parquetCapitalData as og_parquetCapitalData from sysdata.arctic.arctic_capital import arcticCapitalData +from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData +from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData as og_parquetFuturesContractPriceData +from sysdata.data_blob import get_parquet_root_directory +from sysdata.config.production_config import get_production_config + +try: + parquet_root = get_parquet_root_directory(get_production_config()) +except: + ## fine if not using parquet + pass ## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class parquetFuturesAdjustedPricesData = og_parquetFuturesAdjustedPricesData ## change to arctic if desired -parquetCapitalData = og_parquetCapitalData \ No newline at end of file +parquet_futures_adjusted_price_data = parquetFuturesAdjustedPricesData(parquet_root) ## replace with arcticFuturesContractPriceData() if desired + +parquetCapitalData = og_parquetCapitalData + +parquetFuturesContractPriceData = og_parquetFuturesContractPriceData +parquet_futures_contract_price_data = parquetFuturesContractPriceData(parquet_root) \ No newline at end of file diff --git a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py index 55935e031d..bbcb988597 100755 --- a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py +++ b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py @@ -6,7 +6,7 @@ """ from syscore.constants import arg_not_supplied from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquet_futures_adjusted_price_data from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysobjects.adjusted_prices import futuresAdjustedPrices @@ -14,7 +14,7 @@ def _get_data_inputs(csv_adj_data_path): arctic_multiple_prices = arcticFuturesMultiplePricesData() - parquet_adjusted_prices = parquetFuturesAdjustedPricesData() + parquet_adjusted_prices = parquet_futures_adjusted_price_data csv_adjusted_prices = csvFuturesAdjustedPricesData(csv_adj_data_path) return arctic_multiple_prices, parquet_adjusted_prices, csv_adjusted_prices diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 15bc6cef34..8a7689f6fe 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -1,8 +1,6 @@ -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquet_futures_contract_price_data from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquet_futures_adjusted_price_data from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData @@ -13,9 +11,9 @@ from sysobjects.adjusted_prices import futuresAdjustedPrices -db_data_individual_prices = arcticFuturesContractPriceData() +db_data_individual_prices = parquet_futures_contract_price_data db_data_multiple_prices = arcticFuturesMultiplePricesData() -db_data_adjusted_prices = parquetFuturesAdjustedPricesData() +db_data_adjusted_prices = parquet_futures_adjusted_price_data csv_roll_calendar = csvRollCalendarData() csv_multiple = csvFuturesMultiplePricesData() diff --git a/sysinit/futures/contract_prices_from_csv_to_arctic.py b/sysinit/futures/contract_prices_from_csv_to_arctic.py index 1d23c3b2b4..e9c30f9f0a 100644 --- a/sysinit/futures/contract_prices_from_csv_to_arctic.py +++ b/sysinit/futures/contract_prices_from_csv_to_arctic.py @@ -1,9 +1,7 @@ from syscore.constants import arg_not_supplied from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquet_futures_contract_price_data from sysobjects.contracts import futuresContract @@ -29,7 +27,7 @@ def init_arctic_with_csv_futures_contract_prices_for_code( ): print(instrument_code) csv_prices = csvFuturesContractPriceData(datapath, config=csv_config) - arctic_prices = arcticFuturesContractPriceData() + arctic_prices = parquet_futures_contract_price_data print("Getting .csv prices may take some time") csv_price_dict = csv_prices.get_merged_prices_for_instrument(instrument_code) diff --git a/sysinit/futures/create_hourly_and_daily.py b/sysinit/futures/create_hourly_and_daily.py index 2df1d7fc7b..6fb212f613 100644 --- a/sysinit/futures/create_hourly_and_daily.py +++ b/sysinit/futures/create_hourly_and_daily.py @@ -3,13 +3,10 @@ closing_date_rows_in_pd_object, get_intraday_pdf_at_frequency, ) -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) - +from sysdata.pointers import parquet_futures_contract_price_data def write_split_data_for_instrument(instrument_code): - a = arcticFuturesContractPriceData() + a = parquet_futures_contract_price_data list_of_contracts = a.contracts_with_merged_price_data_for_instrument_code( instrument_code ) @@ -38,10 +35,10 @@ def write_split_data_for_instrument(instrument_code): if __name__ == "__main__": input( - "This script will delete any existing hourly and daily data in arctic, and replace with hourly and data inferred from 'merged' (legacy) data. CTL-C to abort" + "This script will delete any existing hourly and daily data in parquet, and replace with hourly and data inferred from 'merged' (legacy) data. CTL-C to abort" ) - a = arcticFuturesContractPriceData() + a = parquet_futures_contract_price_data instrument_list = a.get_list_of_instrument_codes_with_merged_price_data() for instrument_code in instrument_list: print(instrument_code) diff --git a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py index 2d5725a109..507550eda4 100644 --- a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py +++ b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py @@ -2,7 +2,7 @@ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquet_futures_adjusted_price_data def init_arctic_with_csv_futures_contract_prices( multiple_price_datapath=arg_not_supplied, adj_price_datapath=arg_not_supplied @@ -38,7 +38,7 @@ def init_arctic_with_csv_prices_for_code( ) csv_adj_data = csvFuturesAdjustedPricesData(adj_price_datapath) - parquet_adj_data = parquetFuturesAdjustedPricesData() + parquet_adj_data = parquet_futures_adjusted_price_data adj_prices = csv_adj_data.get_adjusted_prices(instrument_code) parquet_adj_data.add_adjusted_prices( diff --git a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py b/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py index f26ca5a226..3c42e069f4 100755 --- a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py +++ b/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py @@ -16,9 +16,7 @@ import datetime import pandas as pd -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquet_futures_contract_price_data from sysobjects.rolls import rollParameters, contractDateWithRollParameters from sysobjects.contract_dates_and_expiries import contractDate @@ -26,16 +24,13 @@ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) from sysinit.futures.build_roll_calendars import adjust_to_price_series from sysobjects.multiple_prices import futuresMultiplePrices def _get_data_inputs(csv_roll_data_path, csv_multiple_data_path): csv_roll_calendars = csvRollCalendarData(csv_roll_data_path) - arctic_individual_futures_prices = arcticFuturesContractPriceData() + arctic_individual_futures_prices = parquet_futures_contract_price_data arctic_multiple_prices = arcticFuturesMultiplePricesData() csv_multiple_prices = csvFuturesMultiplePricesData(csv_multiple_data_path) @@ -144,7 +139,7 @@ def process_multiple_prices_single_instrument( def adjust_roll_calendar(instrument_code, roll_calendar): - arctic_prices_per_contract = arcticFuturesContractPriceData() + arctic_prices_per_contract = parquet_futures_contract_price_data print("Getting prices to adjust roll calendar") dict_of_prices = arctic_prices_per_contract.get_merged_prices_for_instrument( instrument_code diff --git a/sysinit/futures/repocsv_adjusted_prices.py b/sysinit/futures/repocsv_adjusted_prices.py index 9a0abf2974..353413951b 100755 --- a/sysinit/futures/repocsv_adjusted_prices.py +++ b/sysinit/futures/repocsv_adjusted_prices.py @@ -2,12 +2,12 @@ Copy from csv repo files to arctic for adjusted prices """ from syscore.constants import arg_not_supplied -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.pointers import parquet_futures_adjusted_price_data from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort") - parquet_adjusted_prices = parquetFuturesAdjustedPricesData() + parquet_adjusted_prices = parquet_futures_adjusted_price_data ## MODIFY PATH TO USE SOMETHING OTHER THAN DEFAULT csv_adj_datapath = arg_not_supplied diff --git a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py index 4dff8c53ca..418281381c 100755 --- a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py +++ b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py @@ -1,9 +1,7 @@ from syscore.interactive.input import true_if_answer_is_yes from syscore.constants import arg_not_supplied -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquet_futures_contract_price_data from sysobjects.rolls import rollParameters from sysobjects.roll_calendars import rollCalendar from sysdata.csv.csv_roll_calendars import csvRollCalendarData @@ -35,7 +33,7 @@ def build_and_write_roll_calendar( print("Writing to %s" % output_datapath) if input_prices is arg_not_supplied: - prices = arcticFuturesContractPriceData() + prices = parquet_futures_contract_price_data else: prices = input_prices @@ -99,7 +97,7 @@ def check_saved_roll_calendar( roll_calendar = csv_roll_calendars.get_roll_calendar(instrument_code) if input_prices is arg_not_supplied: - prices = arcticFuturesContractPriceData() + prices = parquet_futures_contract_price_data else: prices = input_prices diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 06f7232d07..24a2da5c6e 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -24,13 +24,11 @@ from sysdata.csv.csv_roll_state_storage import csvRollStateData from sysdata.csv.csv_spreads import csvSpreadsForInstrumentData -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.pointers import parquetFuturesAdjustedPricesData from sysdata.pointers import parquetCapitalData +from sysdata.pointers import parquetFuturesContractPriceData +from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData @@ -150,7 +148,7 @@ def get_data_and_create_csv_directories(logname): [ parquetCapitalData, parquetFuturesAdjustedPricesData, - arcticFuturesContractPriceData, + parquetFuturesContractPriceData, arcticFuturesMultiplePricesData, arcticFxPricesData, arcticSpreadsForInstrumentData, @@ -189,7 +187,7 @@ def backup_futures_contract_prices_to_csv(data, ignore_long_expired: bool = True def backup_futures_contract_prices_for_instrument_to_csv( data: dataBlob, instrument_code: str, ignore_long_expired: bool = True ): - list_of_contracts = data.arctic_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( + list_of_contracts = data.parquet_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( instrument_code ) @@ -211,8 +209,8 @@ def backup_futures_contract_prices_for_contract_to_csv( return None - arctic_data = ( - data.arctic_futures_contract_price.get_merged_prices_for_contract_object( + parquet_data = ( + data.parquet_futures_contract_price.get_merged_prices_for_contract_object( futures_contract ) ) @@ -221,7 +219,7 @@ def backup_futures_contract_prices_for_contract_to_csv( futures_contract ) - if check_df_equals(arctic_data, csv_data): + if check_df_equals(parquet_data, csv_data): # No update needed, move on data.log.debug("No prices backup needed for %s" % str(futures_contract)) else: @@ -229,7 +227,7 @@ def backup_futures_contract_prices_for_contract_to_csv( try: data.csv_futures_contract_price.write_merged_prices_for_contract_object( futures_contract, - arctic_data, + parquet_data, ignore_duplication=True, ) data.log.debug( diff --git a/sysproduction/data/contracts.py b/sysproduction/data/contracts.py index c0d9791eec..c7f8faa480 100644 --- a/sysproduction/data/contracts.py +++ b/sysproduction/data/contracts.py @@ -2,9 +2,7 @@ from syscore.exceptions import missingData, ContractNotFound -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquetFuturesContractPriceData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData @@ -33,7 +31,7 @@ class dataContracts(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - arcticFuturesContractPriceData, + parquetFuturesContractPriceData, csvRollParametersData, arcticFuturesMultiplePricesData, mongoFuturesContractData, diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index de9df8d294..e5d9ac6b62 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -13,10 +13,8 @@ ) from sysobjects.spreads import spreadsForInstrument -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, - futuresContractPrices, -) +from sysdata.pointers import parquetFuturesContractPriceData +from sysobjects.futures_per_contract_prices import futuresContractPrices from sysdata.arctic.arctic_multiple_prices import ( arcticFuturesMultiplePricesData, futuresMultiplePrices, @@ -54,7 +52,7 @@ class diagPrices(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - arcticFuturesContractPriceData, + parquetFuturesContractPriceData, parquetFuturesAdjustedPricesData, arcticFuturesMultiplePricesData, mongoFuturesContractData, @@ -249,7 +247,7 @@ class updatePrices(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - arcticFuturesContractPriceData, + parquetFuturesContractPriceData, arcticFuturesMultiplePricesData, mongoFuturesContractData, parquetFuturesAdjustedPricesData, diff --git a/sysproduction/data/volumes.py b/sysproduction/data/volumes.py index 93ba1af105..1a5cf11a20 100644 --- a/sysproduction/data/volumes.py +++ b/sysproduction/data/volumes.py @@ -1,9 +1,7 @@ import datetime as datetime import pandas as pd from syscore.exceptions import missingData -from sysdata.arctic.arctic_futures_per_contract_prices import ( - arcticFuturesContractPriceData, -) +from sysdata.pointers import parquetFuturesContractPriceData from sysdata.futures.futures_per_contract_prices import futuresContractPriceData from sysobjects.contracts import futuresContract from sysdata.data_blob import dataBlob @@ -18,7 +16,7 @@ class diagVolumes(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(arcticFuturesContractPriceData) + data.add_class_object(parquetFuturesContractPriceData) return data @property From 6856479949fcd55dd29b5f31fb46d35f50d8936a Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 10:34:13 +0000 Subject: [PATCH 091/235] much cleaner way to switch arctic/parquet --- sysdata/data_blob.py | 19 ++- sysdata/pointers.py | 4 - sysdata/sim/db_futures_sim_data.py | 39 ++++- ...stedprices_from_mongo_multiple_to_mongo.py | 4 +- sysinit/futures/clone_data_for_instrument.py | 2 + sysproduction/backup_arctic_to_csv.py | 157 ++++++++---------- sysproduction/data/capital.py | 5 +- sysproduction/data/contracts.py | 16 +- sysproduction/data/control_process.py | 5 +- sysproduction/data/currency_data.py | 5 +- sysproduction/data/instruments.py | 12 +- sysproduction/data/optimal_positions.py | 5 +- sysproduction/data/orders.py | 24 +-- sysproduction/data/positions.py | 14 +- sysproduction/data/prices.py | 24 +-- sysproduction/data/production_data_objects.py | 88 ++++++++++ sysproduction/data/sim_data.py | 19 +-- sysproduction/data/volumes.py | 4 +- 18 files changed, 259 insertions(+), 187 deletions(-) create mode 100644 sysproduction/data/production_data_objects.py diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index 485f915e75..ba095d0b3d 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -80,13 +80,13 @@ def __init__( def __repr__(self): return "dataBlob with elements: %s" % ",".join(self._attr_list) - def add_class_list(self, class_list: list): + def add_class_list(self, class_list: list, use_prefix: str = arg_not_supplied): for class_object in class_list: - self.add_class_object(class_object) + self.add_class_object(class_object, use_prefix=use_prefix) - def add_class_object(self, class_object): + def add_class_object(self, class_object, use_prefix: str = arg_not_supplied): class_name = get_class_name(class_object) - attr_name = self._get_new_name(class_name) + attr_name = self._get_new_name(class_name, use_prefix=use_prefix) if not self._already_existing_class_name(attr_name): resolved_instance = self._get_resolved_instance_of_class(class_object) self._resolve_names_and_add(resolved_instance, class_name) @@ -236,10 +236,11 @@ def _resolve_names_and_add(self, resolved_instance, class_name: str): attr_name = self._get_new_name(class_name) self._add_new_class_with_new_name(resolved_instance, attr_name) - def _get_new_name(self, class_name: str) -> str: + def _get_new_name(self, class_name: str, use_prefix: str = arg_not_supplied) -> str: split_up_name = camel_case_split(class_name) attr_name = identifying_name( - split_up_name, keep_original_prefix=self._keep_original_prefix + split_up_name, keep_original_prefix=self._keep_original_prefix, + use_prefix=use_prefix ) return attr_name @@ -382,7 +383,7 @@ def get_parquet_root_directory(config): return get_resolved_pathname(path) -def identifying_name(split_up_name: list, keep_original_prefix=False) -> str: +def identifying_name(split_up_name: list, keep_original_prefix: bool=False, use_prefix: str = arg_not_supplied) -> str: """ Turns sourceClassNameData into broker_class_name or db_class_name @@ -400,7 +401,9 @@ def identifying_name(split_up_name: list, keep_original_prefix=False) -> str: except BaseException: raise Exception("Get_data strings only work if class name ends in ...Data") - if keep_original_prefix: + if use_prefix is not arg_not_supplied: + source_label = use_prefix + elif keep_original_prefix: source_label = original_source_label else: try: diff --git a/sysdata/pointers.py b/sysdata/pointers.py index 5a20d08149..512afa7de5 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -1,10 +1,7 @@ -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData as og_parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData -from sysdata.parquet.parquet_capital import parquetCapitalData as og_parquetCapitalData from sysdata.arctic.arctic_capital import arcticCapitalData from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData -from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData as og_parquetFuturesContractPriceData from sysdata.data_blob import get_parquet_root_directory from sysdata.config.production_config import get_production_config @@ -15,7 +12,6 @@ pass ## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class -parquetFuturesAdjustedPricesData = og_parquetFuturesAdjustedPricesData ## change to arctic if desired parquet_futures_adjusted_price_data = parquetFuturesAdjustedPricesData(parquet_root) ## replace with arcticFuturesContractPriceData() if desired parquetCapitalData = og_parquetCapitalData diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 891636e506..9f120061f9 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -5,7 +5,7 @@ from syscore.constants import arg_not_supplied -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData @@ -26,12 +26,12 @@ def __init__( data = dataBlob( log=log, class_list=[ - parquetFuturesAdjustedPricesData, - arcticFuturesMultiplePricesData, - arcticFxPricesData, - csvFuturesInstrumentData, - csvRollParametersData, - mongoSpreadCostData, + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FX_DATA), + get_class_for_data_type(FUTURES_INSTRUMENT_DATA), + get_class_for_data_type(ROLL_PARAMETERS_DATA), + get_class_for_data_type(SPREAD_DATA) ], ) @@ -43,6 +43,31 @@ def __repr__(self): ) +FUTURES_MULTIPLE_PRICE_DATA = "futures_multiple_price_data" +FUTURES_ADJUSTED_PRICE_DATA = "futures_adjusted_price_data" +CAPITAL_DATA = "capital_data" +FX_DATA = "fx_data" +ROLL_PARAMETERS_DATA = "roll_parameters_data" +FUTURES_INSTRUMENT_DATA = "futures_instrument_data" +SPREAD_DATA = "spread_data" + +def get_class_for_data_type(data_type:str): + + return use_sim_classes[data_type] + +use_sim_classes = { + FX_DATA: arcticFxPricesData, + ROLL_PARAMETERS_DATA: csvRollParametersData, + FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, + + FUTURES_MULTIPLE_PRICE_DATA: arcticFuturesMultiplePricesData, + FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, + SPREAD_DATA: mongoSpreadCostData +} + + + + if __name__ == "__main__": import doctest diff --git a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py index bbcb988597..96663744ef 100755 --- a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py +++ b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py @@ -6,7 +6,7 @@ """ from syscore.constants import arg_not_supplied from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquet_futures_adjusted_price_data +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData from sysobjects.adjusted_prices import futuresAdjustedPrices @@ -14,7 +14,7 @@ def _get_data_inputs(csv_adj_data_path): arctic_multiple_prices = arcticFuturesMultiplePricesData() - parquet_adjusted_prices = parquet_futures_adjusted_price_data + parquet_adjusted_prices = get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA) csv_adjusted_prices = csvFuturesAdjustedPricesData(csv_adj_data_path) return arctic_multiple_prices, parquet_adjusted_prices, csv_adjusted_prices diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 8a7689f6fe..96a1b2ab4f 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -5,6 +5,8 @@ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData + + from sysobjects.contracts import futuresContract from syscore.dateutils import DAILY_PRICE_FREQ, HOURLY_FREQ from sysobjects.multiple_prices import futuresMultiplePrices diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 24a2da5c6e..6162505051 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -24,25 +24,6 @@ from sysdata.csv.csv_roll_state_storage import csvRollStateData from sysdata.csv.csv_spreads import csvSpreadsForInstrumentData -from sysdata.pointers import parquetFuturesAdjustedPricesData -from sysdata.pointers import parquetCapitalData -from sysdata.pointers import parquetFuturesContractPriceData - -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData -from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData -from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData -from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData -from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData - -from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData -from sysdata.mongodb.mongo_historic_orders import ( - mongoBrokerHistoricOrdersData, - mongoContractHistoricOrdersData, - mongoStrategyHistoricOrdersData, -) -from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData -from sysdata.mongodb.mongo_roll_state_storage import mongoRollStateData from sysobjects.contracts import futuresContract from sysobjects.production.tradeable_object import instrumentStrategy @@ -50,11 +31,12 @@ from sysproduction.data.directories import get_csv_backup_directory, get_csv_dump_dir from sysproduction.data.strategies import get_list_of_strategies +from sysproduction.data.production_data_objects import * -def backup_arctic_to_csv(): - data = dataBlob(log_name="backup_arctic_to_csv") +def backup_db_to_csv(): + data = dataBlob(log_name="backup_db_to_csv") backup_object = backupArcticToCsv(data) - backup_object.backup_arctic_to_csv() + backup_object.backup_db_to_csv() return None @@ -70,7 +52,7 @@ class backupArcticToCsv: def __init__(self, data): self.data = data - def backup_arctic_to_csv(self): + def backup_db_to_csv(self): backup_data = get_data_and_create_csv_directories(self.data.log_name) log = self.data.log @@ -121,7 +103,7 @@ def get_data_and_create_csv_directories(logname): os.makedirs(dir_name) data = dataBlob( - csv_data_paths=class_paths, keep_original_prefix=True, log_name=logname + csv_data_paths=class_paths, log_name=logname ) data.add_class_list( @@ -142,26 +124,29 @@ def get_data_and_create_csv_directories(logname): csvStrategyHistoricOrdersData, csvStrategyPositionData, ] + , use_prefix="csv" ) data.add_class_list( [ - parquetCapitalData, - parquetFuturesAdjustedPricesData, - parquetFuturesContractPriceData, - arcticFuturesMultiplePricesData, - arcticFxPricesData, - arcticSpreadsForInstrumentData, - mongoBrokerHistoricOrdersData, - mongoContractHistoricOrdersData, - arcticContractPositionData, - mongoFuturesContractData, - arcticOptimalPositionData, - mongoRollStateData, - mongoSpreadCostData, - mongoStrategyHistoricOrdersData, - arcticStrategyPositionData, - ] + get_class_for_data_type(CAPITAL_DATA), + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FX_DATA), + get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), + get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA), + get_class_for_data_type(OPTIMAL_POSITION_DATA), + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(SPREAD_DATA) + + ], + use_prefix="db" ) return data @@ -174,7 +159,7 @@ def get_data_and_create_csv_directories(logname): # Futures contract data def backup_futures_contract_prices_to_csv(data, ignore_long_expired: bool = True): instrument_list = ( - data.arctic_futures_contract_price.get_list_of_instrument_codes_with_merged_price_data() + data.db_futures_contract_price.get_list_of_instrument_codes_with_merged_price_data() ) for instrument_code in instrument_list: backup_futures_contract_prices_for_instrument_to_csv( @@ -187,7 +172,7 @@ def backup_futures_contract_prices_to_csv(data, ignore_long_expired: bool = True def backup_futures_contract_prices_for_instrument_to_csv( data: dataBlob, instrument_code: str, ignore_long_expired: bool = True ): - list_of_contracts = data.parquet_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( + list_of_contracts = data.db_futures_contract_price.contracts_with_merged_price_data_for_instrument_code( instrument_code ) @@ -209,8 +194,8 @@ def backup_futures_contract_prices_for_contract_to_csv( return None - parquet_data = ( - data.parquet_futures_contract_price.get_merged_prices_for_contract_object( + db_data = ( + data.db_futures_contract_price.get_merged_prices_for_contract_object( futures_contract ) ) @@ -219,7 +204,7 @@ def backup_futures_contract_prices_for_contract_to_csv( futures_contract ) - if check_df_equals(parquet_data, csv_data): + if check_df_equals(db_data, csv_data): # No update needed, move on data.log.debug("No prices backup needed for %s" % str(futures_contract)) else: @@ -227,7 +212,7 @@ def backup_futures_contract_prices_for_contract_to_csv( try: data.csv_futures_contract_price.write_merged_prices_for_contract_object( futures_contract, - parquet_data, + db_data, ignore_duplication=True, ) data.log.debug( @@ -241,17 +226,17 @@ def backup_futures_contract_prices_for_contract_to_csv( # fx def backup_fx_to_csv(data): - fx_codes = data.arctic_fx_prices.get_list_of_fxcodes() + fx_codes = data.db_fx_prices.get_list_of_fxcodes() for fx_code in fx_codes: - arctic_data = data.arctic_fx_prices.get_fx_prices(fx_code) + db_data = data.db_fx_prices.get_fx_prices(fx_code) csv_data = data.csv_fx_prices.get_fx_prices(fx_code) - if check_ts_equals(arctic_data, csv_data): + if check_ts_equals(db_data, csv_data): data.log.debug("No fx backup needed for %s" % fx_code) else: # Write backup try: data.csv_fx_prices.add_fx_prices( - fx_code, arctic_data, ignore_duplication=True + fx_code, db_data, ignore_duplication=True ) data.log.debug("Written .csv backup for %s" % fx_code) except BaseException: @@ -259,24 +244,24 @@ def backup_fx_to_csv(data): def backup_multiple_to_csv(data): - instrument_list = data.arctic_futures_multiple_prices.get_list_of_instruments() + instrument_list = data.db_futures_multiple_prices.get_list_of_instruments() for instrument_code in instrument_list: backup_multiple_to_csv_for_instrument(data, instrument_code) def backup_multiple_to_csv_for_instrument(data, instrument_code: str): - arctic_data = data.arctic_futures_multiple_prices.get_multiple_prices( + db_data = data.db_futures_multiple_prices.get_multiple_prices( instrument_code ) csv_data = data.csv_futures_multiple_prices.get_multiple_prices(instrument_code) - if check_df_equals(arctic_data, csv_data): + if check_df_equals(db_data, csv_data): data.log.debug("No multiple prices backup needed for %s" % instrument_code) pass else: try: data.csv_futures_multiple_prices.add_multiple_prices( - instrument_code, arctic_data, ignore_duplication=True + instrument_code, db_data, ignore_duplication=True ) data.log.debug( "Written .csv backup multiple prices for %s" % instrument_code @@ -288,24 +273,24 @@ def backup_multiple_to_csv_for_instrument(data, instrument_code: str): def backup_adj_to_csv(data): - instrument_list = data.parquet_futures_adjusted_prices.get_list_of_instruments() + instrument_list = data.db_futures_adjusted_prices.get_list_of_instruments() for instrument_code in instrument_list: backup_adj_to_csv_for_instrument(data, instrument_code) def backup_adj_to_csv_for_instrument(data: dataBlob, instrument_code: str): - arctic_data = data.parquet_futures_adjusted_prices.get_adjusted_prices( + db_data = data.db_futures_adjusted_prices.get_adjusted_prices( instrument_code ) csv_data = data.csv_futures_adjusted_prices.get_adjusted_prices(instrument_code) - if check_ts_equals(arctic_data, csv_data): + if check_ts_equals(db_data, csv_data): data.log.debug("No adjusted prices backup needed for %s" % instrument_code) pass else: try: data.csv_futures_adjusted_prices.add_adjusted_prices( - instrument_code, arctic_data, ignore_duplication=True + instrument_code, db_data, ignore_duplication=True ) data.log.debug( "Written .csv backup for adjusted prices %s" % instrument_code @@ -317,22 +302,22 @@ def backup_adj_to_csv_for_instrument(data: dataBlob, instrument_code: str): def backup_spreads_to_csv(data: dataBlob): - instrument_list = data.arctic_spreads_for_instrument.get_list_of_instruments() + instrument_list = data.db_spreads_for_instrument.get_list_of_instruments() for instrument_code in instrument_list: backup_spreads_to_csv_for_instrument(data, instrument_code) def backup_spreads_to_csv_for_instrument(data: dataBlob, instrument_code: str): - arctic_data = data.arctic_spreads_for_instrument.get_spreads(instrument_code) + db_data = data.db_spreads_for_instrument.get_spreads(instrument_code) csv_data = data.csv_spreads_for_instrument.get_spreads(instrument_code) - if check_ts_equals(arctic_data, csv_data): + if check_ts_equals(db_data, csv_data): data.log.debug("No spreads backup needed for %s" % instrument_code) pass else: try: data.csv_spreads_for_instrument.add_spreads( - instrument_code, arctic_data, ignore_duplication=True + instrument_code, db_data, ignore_duplication=True ) data.log.debug("Written .csv backup for spreads %s" % instrument_code) except BaseException: @@ -343,24 +328,24 @@ def backup_spreads_to_csv_for_instrument(data: dataBlob, instrument_code: str): def backup_contract_position_data(data): instrument_list = ( - data.arctic_contract_position.get_list_of_instruments_with_any_position() + data.db_contract_position.get_list_of_instruments_with_any_position() ) for instrument_code in instrument_list: contract_list = ( - data.arctic_contract_position.get_list_of_contracts_for_instrument_code( + data.db_contract_position.get_list_of_contracts_for_instrument_code( instrument_code ) ) for contract in contract_list: try: - arctic_data = data.arctic_contract_position.get_position_as_series_for_contract_object( + db_data = data.db_contract_position.get_position_as_series_for_contract_object( contract ) except missingData: print("No data to write to .csv") else: data.csv_contract_position.overwrite_position_series_for_contract_object_without_checking( - contract, arctic_data + contract, db_data ) data.log.debug( "Backed up %s %s contract position data" % (instrument_code, contract) @@ -370,7 +355,7 @@ def backup_contract_position_data(data): def backup_strategy_position_data(data): strategy_list = get_list_of_strategies(data) instrument_list = ( - data.arctic_contract_position.get_list_of_instruments_with_any_position() + data.db_contract_position.get_list_of_instruments_with_any_position() ) for strategy_name in strategy_list: for instrument_code in instrument_list: @@ -378,13 +363,13 @@ def backup_strategy_position_data(data): strategy_name=strategy_name, instrument_code=instrument_code ) try: - arctic_data = data.arctic_strategy_position.get_position_as_series_for_instrument_strategy_object( + db_data = data.db_strategy_position.get_position_as_series_for_instrument_strategy_object( instrument_strategy ) except missingData: continue data.csv_strategy_position.overwrite_position_series_for_instrument_strategy_without_checking( - instrument_strategy, arctic_data + instrument_strategy, db_data ) data.log.debug( "Backed up %s %s strategy position data" @@ -395,24 +380,24 @@ def backup_strategy_position_data(data): def backup_historical_orders(data): data.log.debug("Backing up strategy orders...") list_of_orders = [ - data.mongo_strategy_historic_orders.get_order_with_orderid(id) - for id in data.mongo_strategy_historic_orders.get_list_of_order_ids() + data.db_strategy_historic_orders.get_order_with_orderid(id) + for id in data.db_strategy_historic_orders.get_list_of_order_ids() ] data.csv_strategy_historic_orders.write_orders(list_of_orders) data.log.debug("Done") data.log.debug("Backing up contract orders...") list_of_orders = [ - data.mongo_contract_historic_orders.get_order_with_orderid(order_id) - for order_id in data.mongo_contract_historic_orders.get_list_of_order_ids() + data.db_contract_historic_orders.get_order_with_orderid(order_id) + for order_id in data.db_contract_historic_orders.get_list_of_order_ids() ] data.csv_contract_historic_orders.write_orders(list_of_orders) data.log.debug("Done") data.log.debug("Backing up broker orders...") list_of_orders = [ - data.mongo_broker_historic_orders.get_order_with_orderid(order_id) - for order_id in data.mongo_broker_historic_orders.get_list_of_order_ids() + data.db_broker_historic_orders.get_order_with_orderid(order_id) + for order_id in data.db_broker_historic_orders.get_list_of_order_ids() ] data.csv_broker_historic_orders.write_orders(list_of_orders) data.log.debug("Done") @@ -434,7 +419,7 @@ def get_dict_of_strategy_capital(data: dataBlob) -> dict: for strategy_name in strategy_list: strategy_capital_data[ strategy_name - ] = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) + ] = data.db_capital.get_capital_pd_df_for_strategy(strategy_name) return strategy_capital_data @@ -444,7 +429,7 @@ def add_total_capital_to_strategy_capital_dict_return_df( ) -> pd.DataFrame: strategy_capital_as_df = pd.concat(capital_data, axis=1) - total_capital = data.arctic_capital.get_df_of_all_global_capital() + total_capital = data.db_capital.get_df_of_all_global_capital() capital_data = pd.concat([strategy_capital_as_df, total_capital], axis=1) capital_data = capital_data.ffill() @@ -455,33 +440,33 @@ def add_total_capital_to_strategy_capital_dict_return_df( def backup_optimal_positions(data): strategy_instrument_list = ( - data.arctic_optimal_position.get_list_of_instrument_strategies_with_optimal_position() + data.db_optimal_position.get_list_of_instrument_strategies_with_optimal_position() ) for instrument_strategy in strategy_instrument_list: try: - arctic_data = data.arctic_optimal_position.get_optimal_position_as_df_for_instrument_strategy( + db_data = data.db_optimal_position.get_optimal_position_as_df_for_instrument_strategy( instrument_strategy ) except missingData: continue data.csv_optimal_position.write_optimal_position_as_df_for_instrument_strategy_without_checking( - instrument_strategy, arctic_data + instrument_strategy, db_data ) data.log.debug("Backed up %s optimal position data" % str(instrument_strategy)) def backup_spread_cost_data(data): - spread_cost_as_series = data.mongo_spread_cost.get_spread_costs_as_series() + spread_cost_as_series = data.db_spread_cost.get_spread_costs_as_series() data.csv_spread_cost.write_all_instrument_spreads(spread_cost_as_series) data.log.debug("Backed up spread cost data") def backup_roll_state_data(data): - instrument_list = data.mongo_roll_state.get_list_of_instruments() + instrument_list = data.db_roll_state.get_list_of_instruments() roll_state_list = [] for instrument_code in instrument_list: - roll_state = data.mongo_roll_state.get_name_of_roll_state(instrument_code) + roll_state = data.db_roll_state.get_name_of_roll_state(instrument_code) roll_state_list.append(roll_state) roll_state_df = pd.DataFrame(roll_state_list, index=instrument_list) @@ -492,11 +477,11 @@ def backup_roll_state_data(data): def backup_contract_data(data): instrument_list = ( - data.mongo_futures_contract.get_list_of_all_instruments_with_contracts() + data.db_futures_contract.get_list_of_all_instruments_with_contracts() ) for instrument_code in instrument_list: contract_list = ( - data.mongo_futures_contract.get_all_contract_objects_for_instrument_code( + data.db_futures_contract.get_all_contract_objects_for_instrument_code( instrument_code ) ) @@ -514,4 +499,4 @@ def backup_csv_dump(data): if __name__ == "__main__": - backup_arctic_to_csv() + backup_db_to_csv() diff --git a/sysproduction/data/capital.py b/sysproduction/data/capital.py index 7209797205..1fd4075a2a 100644 --- a/sysproduction/data/capital.py +++ b/sysproduction/data/capital.py @@ -7,18 +7,19 @@ from sysdata.production.capital import capitalData, totalCapitalCalculationData from sysdata.production.margin import marginData, seriesOfMargin -from sysdata.pointers import parquetCapitalData from sysdata.mongodb.mongo_margin import mongoMarginData from sysdata.data_blob import dataBlob from sysproduction.data.generic_production_data import productionDataLayerGeneric +from sysproduction.data.production_data_objects import get_class_for_data_type, CAPITAL_DATA from systems.accounts.from_returns import account_curve_from_returns class dataCapital(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(parquetCapitalData) + capital_data_class = get_class_for_data_type(CAPITAL_DATA) + data.add_class_object(capital_data_class) return data diff --git a/sysproduction/data/contracts.py b/sysproduction/data/contracts.py index c7f8faa480..a08a79594f 100644 --- a/sysproduction/data/contracts.py +++ b/sysproduction/data/contracts.py @@ -1,11 +1,6 @@ import datetime -from syscore.exceptions import missingData, ContractNotFound - -from sysdata.pointers import parquetFuturesContractPriceData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.csv.csv_roll_parameters import csvRollParametersData -from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData +from syscore.exceptions import missingData from sysdata.futures.contracts import futuresContractData from sysdata.futures.multiple_prices import futuresMultiplePricesData @@ -22,6 +17,7 @@ from sysproduction.data.prices import get_valid_instrument_code_from_user, diagPrices from sysproduction.data.generic_production_data import productionDataLayerGeneric +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, ROLL_PARAMETERS_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA from sysdata.data_blob import dataBlob missing_expiry = datetime.datetime(1900, 1, 1) @@ -31,10 +27,10 @@ class dataContracts(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - parquetFuturesContractPriceData, - csvRollParametersData, - arcticFuturesMultiplePricesData, - mongoFuturesContractData, + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(ROLL_PARAMETERS_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA) ] ) diff --git a/sysproduction/data/control_process.py b/sysproduction/data/control_process.py index 1959e63be3..f85964477a 100644 --- a/sysproduction/data/control_process.py +++ b/sysproduction/data/control_process.py @@ -9,12 +9,11 @@ from sysdata.config.control_config import get_control_config from sysdata.data_blob import dataBlob -from sysdata.mongodb.mongo_process_control import mongoControlProcessData from sysdata.production.process_control_data import controlProcessData from sysproduction.data.generic_production_data import productionDataLayerGeneric - +from sysproduction.data.production_data_objects import get_class_for_data_type, PROCESS_CONTROL_DATA DEFAULT_METHOD_FREQUENCY = 60 DEFAULT_MAX_EXECUTIONS = 1 DEFAULT_START_TIME_STRING = "00:01" @@ -25,7 +24,7 @@ class dataControlProcess(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(mongoControlProcessData) + data.add_class_object(get_class_for_data_type(PROCESS_CONTROL_DATA)) return data diff --git a/sysproduction/data/currency_data.py b/sysproduction/data/currency_data.py index 0909219c95..754d998242 100644 --- a/sysproduction/data/currency_data.py +++ b/sysproduction/data/currency_data.py @@ -1,18 +1,17 @@ from syscore.constants import arg_not_supplied from syscore.interactive.menus import print_menu_of_values_and_get_response -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.fx.spotfx import fxPricesData from sysdata.data_blob import dataBlob from sysobjects.spot_fx_prices import currencyValue, fxPrices from sysproduction.data.generic_production_data import productionDataLayerGeneric - +from sysproduction.data.production_data_objects import get_class_for_data_type, FX_DATA class dataCurrency(productionDataLayerGeneric): def _add_required_classes_to_data(self, data: dataBlob) -> dataBlob: - data.add_class_object(arcticFxPricesData) + data.add_class_object(get_class_for_data_type(FX_DATA)) return data @property diff --git a/sysproduction/data/instruments.py b/sysproduction/data/instruments.py index da07953908..1d9584766d 100644 --- a/sysproduction/data/instruments.py +++ b/sysproduction/data/instruments.py @@ -1,20 +1,19 @@ -from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData from sysdata.data_blob import dataBlob from sysdata.futures.instruments import futuresInstrumentData from sysdata.futures.spread_costs import spreadCostData -from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData + from sysobjects.spot_fx_prices import currencyValue from sysobjects.instruments import instrumentCosts from sysproduction.data.currency_data import dataCurrency from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.config import get_list_of_stale_instruments - +from sysproduction.data.production_data_objects import FX_DATA, SPREAD_DATA, get_class_for_data_type, FUTURES_INSTRUMENT_DATA class updateSpreadCosts(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(mongoSpreadCostData) + data.add_class_object(get_class_for_data_type(SPREAD_DATA)) return data def update_spread_costs(self, instrument_code: str, spread_cost: float): @@ -34,7 +33,10 @@ def db_spread_cost_data(self) -> spreadCostData: class diagInstruments(productionDataLayerGeneric): def _add_required_classes_to_data(self, data: dataBlob) -> dataBlob: - data.add_class_list([csvFuturesInstrumentData, mongoSpreadCostData]) + data.add_class_list([ + get_class_for_data_type(FUTURES_INSTRUMENT_DATA), + get_class_for_data_type(SPREAD_DATA)]) + return data def get_spread_costs_as_series(self): diff --git a/sysproduction/data/optimal_positions.py b/sysproduction/data/optimal_positions.py index 60b67eb9bd..c11ec2bfbc 100644 --- a/sysproduction/data/optimal_positions.py +++ b/sysproduction/data/optimal_positions.py @@ -3,7 +3,6 @@ import pandas as pd from sysdata.data_blob import dataBlob -from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData from sysdata.production.optimal_positions import optimalPositionData from sysobjects.production.optimal_positions import ( listOfOptimalPositionsAcrossInstrumentStrategies, @@ -18,11 +17,11 @@ get_list_of_stale_instruments, get_list_of_stale_strategies, ) - +from sysproduction.data.production_data_objects import get_class_for_data_type, OPTIMAL_POSITION_DATA class dataOptimalPositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(arcticOptimalPositionData) + data.add_class_object(get_class_for_data_type(OPTIMAL_POSITION_DATA)) return data diff --git a/sysproduction/data/orders.py b/sysproduction/data/orders.py index bc7caaea10..634e130c61 100644 --- a/sysproduction/data/orders.py +++ b/sysproduction/data/orders.py @@ -2,16 +2,6 @@ from syscore.constants import arg_not_supplied from sysexecution.orders.named_order_objects import missing_order, no_parent -from sysdata.mongodb.mongo_order_stack import ( - mongoInstrumentOrderStackData, - mongoContractOrderStackData, - mongoBrokerOrderStackData, -) -from sysdata.mongodb.mongo_historic_orders import ( - mongoStrategyHistoricOrdersData, - mongoContractHistoricOrdersData, - mongoBrokerHistoricOrdersData, -) from sysdata.production.historic_orders import ( brokerHistoricOrdersData, contractHistoricOrdersData, @@ -34,6 +24,8 @@ from sysobjects.production.tradeable_object import instrumentStrategy, futuresContract +from sysproduction.data.production_data_objects import get_class_for_data_type, INSTRUMENT_ORDER_STACK_DATA, CONTRACT_ORDER_STACK_DATA, BROKER_HISTORIC_ORDERS_DATA, STRATEGY_HISTORIC_ORDERS_DATA, CONTRACT_HISTORIC_ORDERS_DATA, BROKER_ORDER_STACK_DATA + class dataOrders(object): def __init__(self, data: dataBlob = arg_not_supplied): @@ -42,12 +34,12 @@ def __init__(self, data: dataBlob = arg_not_supplied): data = dataBlob() data.add_class_list( [ - mongoInstrumentOrderStackData, - mongoContractOrderStackData, - mongoBrokerOrderStackData, - mongoContractHistoricOrdersData, - mongoStrategyHistoricOrdersData, - mongoBrokerHistoricOrdersData, + get_class_for_data_type(INSTRUMENT_ORDER_STACK_DATA), + get_class_for_data_type(CONTRACT_ORDER_STACK_DATA), + get_class_for_data_type(BROKER_ORDER_STACK_DATA), + get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), + get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA) ] ) self._data = data diff --git a/sysproduction/data/positions.py b/sysproduction/data/positions.py index 8e7e47a4eb..a7314373e8 100644 --- a/sysproduction/data/positions.py +++ b/sysproduction/data/positions.py @@ -7,10 +7,6 @@ from syscore.exceptions import ContractNotFound from sysexecution.orders.named_order_objects import missing_order -from sysdata.mongodb.mongo_roll_state_storage import mongoRollStateData -from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData -from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData - from sysdata.production.roll_state import rollStateData from sysdata.production.historic_contract_positions import contractPositionData @@ -18,8 +14,6 @@ strategyPositionData, listOfInstrumentStrategyPositions, ) -from sysproduction.data.contracts import dataContracts - from sysdata.data_blob import dataBlob @@ -43,12 +37,16 @@ from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.contracts import dataContracts - +from sysproduction.data.production_data_objects import get_class_for_data_type, ROLL_STATE_DATA, STRATEGY_POSITION_DATA, CONTRACT_POSITION_DATA class diagPositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( - [mongoRollStateData, arcticStrategyPositionData, arcticContractPositionData] + [ + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA) + ] ) return data diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index e5d9ac6b62..f24a3e64b3 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -13,20 +13,11 @@ ) from sysobjects.spreads import spreadsForInstrument -from sysdata.pointers import parquetFuturesContractPriceData from sysobjects.futures_per_contract_prices import futuresContractPrices -from sysdata.arctic.arctic_multiple_prices import ( - arcticFuturesMultiplePricesData, - futuresMultiplePrices, -) -from sysdata.pointers import parquetFuturesAdjustedPricesData +from sysobjects.multiple_prices import futuresMultiplePrices from sysobjects.adjusted_prices import futuresAdjustedPrices -from sysdata.arctic.arctic_spreads import ( - arcticSpreadsForInstrumentData, - spreadsForInstrumentData, -) -from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData +from sysdata.futures.spreads import spreadsForInstrumentData from sysdata.futures.multiple_prices import futuresMultiplePricesData from sysdata.futures.adjusted_prices import futuresAdjustedPricesData @@ -44,6 +35,7 @@ ## default for spike checking from sysproduction.data.instruments import diagInstruments, get_block_size +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA, SPREAD_DATA VERY_BIG_NUMBER = 999999.0 @@ -52,11 +44,11 @@ class diagPrices(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - parquetFuturesContractPriceData, - parquetFuturesAdjustedPricesData, - arcticFuturesMultiplePricesData, - mongoFuturesContractData, - arcticSpreadsForInstrumentData, + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA), + get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA) ] ) return data diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py new file mode 100644 index 0000000000..c9c31b778b --- /dev/null +++ b/sysproduction/data/production_data_objects.py @@ -0,0 +1,88 @@ +from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData +from sysdata.parquet.parquet_capital import parquetCapitalData +from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData + +from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.arctic.arctic_capital import arcticCapitalData +from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData +from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData +from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData +from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData +from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData +from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData +from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData + + +from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData +from sysdata.mongodb.mongo_process_control import mongoControlProcessData +from sysdata.mongodb.mongo_order_stack import ( + mongoInstrumentOrderStackData, + mongoContractOrderStackData, + mongoBrokerOrderStackData, +) +from sysdata.mongodb.mongo_historic_orders import ( + mongoStrategyHistoricOrdersData, + mongoContractHistoricOrdersData, + mongoBrokerHistoricOrdersData, +) +from sysdata.mongodb.mongo_roll_state_storage import mongoRollStateData + + +from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData +from sysdata.csv.csv_roll_parameters import csvRollParametersData + +FUTURES_CONTRACT_PRICE_DATA = "futures_contract_price_data" +FUTURES_MULTIPLE_PRICE_DATA = "futures_multiple_price_data" +FUTURES_ADJUSTED_PRICE_DATA = "futures_adjusted_price_data" +CAPITAL_DATA = "capital_data" +CONTRACT_POSITION_DATA = "contract_position_data" +STRATEGY_POSITION_DATA = "strategy_position_data" +OPTIMAL_POSITION_DATA = "optimal_position_data" +SPREAD_DATA = "spread_data" +FX_DATA = "fx_data" +ROLL_PARAMETERS_DATA = "roll_parameters_data" +FUTURES_CONTRACT_DATA = "futures_contract_data" +PROCESS_CONTROL_DATA= "process_control_data" +FUTURES_INSTRUMENT_DATA = "futures_instrument_data" +INSTRUMENT_ORDER_STACK_DATA = "instrument_order_stack_data" +CONTRACT_ORDER_STACK_DATA = "contract_order_stack_data" +BROKER_ORDER_STACK_DATA = "broker_order_stack_data" +STRATEGY_HISTORIC_ORDERS_DATA = "strategy_historic_orders_data" +CONTRACT_HISTORIC_ORDERS_DATA = "contract_historic_orders_data" +BROKER_HISTORIC_ORDERS_DATA = "broker_historic_orders_data" +ROLL_STATE_DATA = "roll_state_data" + +use_production_classes = { + FX_DATA: arcticFxPricesData, + ROLL_PARAMETERS_DATA: csvRollParametersData, + FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, + FUTURES_CONTRACT_DATA: mongoFuturesContractData, + + FUTURES_CONTRACT_PRICE_DATA: parquetFuturesContractPriceData, + FUTURES_MULTIPLE_PRICE_DATA: arcticFuturesMultiplePricesData, + FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, + + CAPITAL_DATA: parquetCapitalData, + + CONTRACT_POSITION_DATA: arcticContractPositionData, + STRATEGY_POSITION_DATA: arcticStrategyPositionData, + OPTIMAL_POSITION_DATA: arcticOptimalPositionData, + SPREAD_DATA: arcticSpreadsForInstrumentData, + + STRATEGY_HISTORIC_ORDERS_DATA: mongoStrategyHistoricOrdersData, + CONTRACT_HISTORIC_ORDERS_DATA: mongoContractHistoricOrdersData, + BROKER_HISTORIC_ORDERS_DATA: mongoBrokerHistoricOrdersData, + + INSTRUMENT_ORDER_STACK_DATA: mongoInstrumentOrderStackData, + CONTRACT_ORDER_STACK_DATA: mongoContractOrderStackData, + BROKER_HISTORIC_ORDERS_DATA: mongoBrokerOrderStackData, + + ROLL_STATE_DATA: mongoRollStateData, + + PROCESS_CONTROL_DATA: mongoControlProcessData +} + +def get_class_for_data_type(data_type:str): + + return use_production_classes[data_type] + diff --git a/sysproduction/data/sim_data.py b/sysproduction/data/sim_data.py index bcbd921a1c..2eb3c622cb 100644 --- a/sysproduction/data/sim_data.py +++ b/sysproduction/data/sim_data.py @@ -2,13 +2,8 @@ from sysdata.sim.db_futures_sim_data import dbFuturesSimData from sysdata.data_blob import dataBlob -from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData -from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData -from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData -from sysdata.csv.csv_roll_parameters import csvRollParametersData +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FX_DATA, SPREAD_DATA, FUTURES_INSTRUMENT_DATA, ROLL_PARAMETERS_DATA def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimData: # Check data has the right elements to do this @@ -17,12 +12,12 @@ def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimDat data.add_class_list( [ - parquetFuturesAdjustedPricesData, - arcticFuturesMultiplePricesData, - arcticFxPricesData, - mongoSpreadCostData, - csvFuturesInstrumentData, - csvRollParametersData, + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FX_DATA), + get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(FUTURES_INSTRUMENT_DATA), + get_class_for_data_type(ROLL_PARAMETERS_DATA) ] ) diff --git a/sysproduction/data/volumes.py b/sysproduction/data/volumes.py index 1a5cf11a20..532daa7872 100644 --- a/sysproduction/data/volumes.py +++ b/sysproduction/data/volumes.py @@ -1,12 +1,12 @@ import datetime as datetime import pandas as pd from syscore.exceptions import missingData -from sysdata.pointers import parquetFuturesContractPriceData from sysdata.futures.futures_per_contract_prices import futuresContractPriceData from sysobjects.contracts import futuresContract from sysdata.data_blob import dataBlob from sysproduction.data.generic_production_data import productionDataLayerGeneric +from sysproduction.data.production_data_objects import FUTURES_CONTRACT_PRICE_DATA, get_class_for_data_type # Get volume data for the contract we're currently trading, plus what we might roll into, plus the previous one # This is handy for working out whether to roll @@ -16,7 +16,7 @@ class diagVolumes(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(parquetFuturesContractPriceData) + data.add_class_object(get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA)) return data @property From 5cccbc2c8e59ea93e2ababc1ecb8385e96aaa3ac Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 10:40:25 +0000 Subject: [PATCH 092/235] bug --- sysdata/pointers.py | 8 ++++---- sysinit/futures/rollcalendars_from_arcticprices_to_csv.py | 5 ++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/sysdata/pointers.py b/sysdata/pointers.py index 512afa7de5..7293a35ba7 100644 --- a/sysdata/pointers.py +++ b/sysdata/pointers.py @@ -12,9 +12,9 @@ pass ## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class -parquet_futures_adjusted_price_data = parquetFuturesAdjustedPricesData(parquet_root) ## replace with arcticFuturesContractPriceData() if desired +#parquet_futures_adjusted_price_data = parquetFuturesAdjustedPricesData(parquet_root) ## replace with arcticFuturesContractPriceData() if desired -parquetCapitalData = og_parquetCapitalData +#parquetCapitalData = og_parquetCapitalData -parquetFuturesContractPriceData = og_parquetFuturesContractPriceData -parquet_futures_contract_price_data = parquetFuturesContractPriceData(parquet_root) \ No newline at end of file +#parquetFuturesContractPriceData = og_parquetFuturesContractPriceData +#parquet_futures_contract_price_data = parquetFuturesContractPriceData(parquet_root) \ No newline at end of file diff --git a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py index 418281381c..a942af8448 100755 --- a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py +++ b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py @@ -1,13 +1,16 @@ from syscore.interactive.input import true_if_answer_is_yes from syscore.constants import arg_not_supplied -from sysdata.pointers import parquet_futures_contract_price_data from sysobjects.rolls import rollParameters from sysobjects.roll_calendars import rollCalendar from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.futures.rolls_parameters import rollParametersData from sysproduction.data.prices import get_valid_instrument_code_from_user +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA + + +parquet_futures_contract_price_data = get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA) """ Generate a 'best guess' roll calendar based on some price data for individual contracts From 4ba26cdc02a4be1686aef856ade87f0b7ac11033 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 10:44:36 +0000 Subject: [PATCH 093/235] bug --- ...prices_from_arcticprices_and_csv_calendars_to_arctic.py | 7 ++++++- sysinit/futures/rollcalendars_from_arcticprices_to_csv.py | 7 +++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py b/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py index 3c42e069f4..a3b9cc5023 100755 --- a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py +++ b/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py @@ -16,7 +16,8 @@ import datetime import pandas as pd -from sysdata.pointers import parquet_futures_contract_price_data + +from sysproduction.data.prices import diagPrices from sysobjects.rolls import rollParameters, contractDateWithRollParameters from sysobjects.contract_dates_and_expiries import contractDate @@ -27,6 +28,10 @@ from sysinit.futures.build_roll_calendars import adjust_to_price_series from sysobjects.multiple_prices import futuresMultiplePrices +from sysdata.data_blob import dataBlob + +diag_prices = diagPrices() +parquet_futures_contract_price_data = diag_prices.db_futures_contract_price_data def _get_data_inputs(csv_roll_data_path, csv_multiple_data_path): csv_roll_calendars = csvRollCalendarData(csv_roll_data_path) diff --git a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py index a942af8448..dfafeb64f5 100755 --- a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py +++ b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py @@ -6,11 +6,14 @@ from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.futures.rolls_parameters import rollParametersData -from sysproduction.data.prices import get_valid_instrument_code_from_user +from sysproduction.data.prices import get_valid_instrument_code_from_user, diagPrices from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA +from sysdata.data_blob import dataBlob -parquet_futures_contract_price_data = get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA) +diag_prices = diagPrices() + +parquet_futures_contract_price_data = diag_prices.db_futures_contract_price_data """ Generate a 'best guess' roll calendar based on some price data for individual contracts From 4d45c87ad8bf7e8d71a9d8707c3f1eaec02bf45a Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 10:46:34 +0000 Subject: [PATCH 094/235] bug --- sysproduction/data/control_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/data/control_process.py b/sysproduction/data/control_process.py index f85964477a..ea7c66e979 100644 --- a/sysproduction/data/control_process.py +++ b/sysproduction/data/control_process.py @@ -125,7 +125,7 @@ def log_end_run_for_method(self, process_name: str, method_name: str): class diagControlProcess(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(mongoControlProcessData) + data.add_class_object(get_class_for_data_type(PROCESS_CONTROL_DATA)) return data From 2d0cc02a60b65772677cbf3b1ba26a7d966407f3 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:04:38 +0000 Subject: [PATCH 095/235] bug --- sysproduction/data/positions.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sysproduction/data/positions.py b/sysproduction/data/positions.py index a7314373e8..7b970f9497 100644 --- a/sysproduction/data/positions.py +++ b/sysproduction/data/positions.py @@ -390,8 +390,11 @@ def get_position_in_priced_contract_for_instrument( class updatePositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_list( - [mongoRollStateData, arcticStrategyPositionData, arcticContractPositionData] + data.add_class_list([ + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA) + ] ) return data From 061ab4d190cb245d130137955f5a86a829e2f724 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:07:35 +0000 Subject: [PATCH 096/235] bug --- sysproduction/data/prices.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index f24a3e64b3..32e69b827d 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -239,11 +239,11 @@ class updatePrices(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: data.add_class_list( [ - parquetFuturesContractPriceData, - arcticFuturesMultiplePricesData, - mongoFuturesContractData, - parquetFuturesAdjustedPricesData, - arcticSpreadsForInstrumentData, + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA), + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(SPREAD_DATA) ] ) From 30ae97da9b0cda31d49a4bc17e5f761f7c4a3076 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:20:11 +0000 Subject: [PATCH 097/235] removed redundant pointers.py --- data/tools/contract_comparison.py | 6 +++-- sysdata/pointers.py | 20 ---------------- sysinit/futures/clone_data_for_instrument.py | 15 ++++++------ .../contract_prices_from_csv_to_arctic.py | 23 ++++++++++--------- sysinit/futures/create_hourly_and_daily.py | 8 ++++--- ...ultiple_and_adjusted_from_csv_to_arctic.py | 14 ++++++----- sysinit/futures/repocsv_adjusted_prices.py | 10 +++++--- 7 files changed, 43 insertions(+), 53 deletions(-) delete mode 100644 sysdata/pointers.py diff --git a/data/tools/contract_comparison.py b/data/tools/contract_comparison.py index de66a39d47..a210a6778b 100644 --- a/data/tools/contract_comparison.py +++ b/data/tools/contract_comparison.py @@ -1,8 +1,10 @@ -from sysdata.pointers import parquet_futures_contract_price_data +from sysproduction.data.prices import diagPrices + from sysobjects.contracts import futuresContract import pandas as pd +diag_prices = diagPrices() class ContractComparison: """Class for comparing futures contracts side by side on different dimensions""" @@ -25,7 +27,7 @@ def _create_comparison( instrument_object=instrument_code, contract_date_object=forward_date_str ) - contract_prices = parquet_futures_contract_price_data + contract_prices = diag_prices.db_futures_contract_price_data price_prices = contract_prices.get_merged_prices_for_contract_object( price_contract ) diff --git a/sysdata/pointers.py b/sysdata/pointers.py deleted file mode 100644 index 7293a35ba7..0000000000 --- a/sysdata/pointers.py +++ /dev/null @@ -1,20 +0,0 @@ -from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData - -from sysdata.arctic.arctic_capital import arcticCapitalData -from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData -from sysdata.data_blob import get_parquet_root_directory -from sysdata.config.production_config import get_production_config - -try: - parquet_root = get_parquet_root_directory(get_production_config()) -except: - ## fine if not using parquet - pass - -## TO USE ARCTIC RATHER THAN PARQUET, REPLACE THE og_ with the relevant arctic class -#parquet_futures_adjusted_price_data = parquetFuturesAdjustedPricesData(parquet_root) ## replace with arcticFuturesContractPriceData() if desired - -#parquetCapitalData = og_parquetCapitalData - -#parquetFuturesContractPriceData = og_parquetFuturesContractPriceData -#parquet_futures_contract_price_data = parquetFuturesContractPriceData(parquet_root) \ No newline at end of file diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 96a1b2ab4f..447a64c30e 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -1,6 +1,6 @@ -from sysdata.pointers import parquet_futures_contract_price_data -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquet_futures_adjusted_price_data + +from sysproduction.data.prices import diagPrices + from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData @@ -9,13 +9,12 @@ from sysobjects.contracts import futuresContract from syscore.dateutils import DAILY_PRICE_FREQ, HOURLY_FREQ -from sysobjects.multiple_prices import futuresMultiplePrices from sysobjects.adjusted_prices import futuresAdjustedPrices - -db_data_individual_prices = parquet_futures_contract_price_data -db_data_multiple_prices = arcticFuturesMultiplePricesData() -db_data_adjusted_prices = parquet_futures_adjusted_price_data +diag_prices = diagPrices() +db_data_individual_prices = diag_prices.db_futures_contract_price_data +db_data_multiple_prices = diag_prices.db_futures_multiple_prices_data +db_data_adjusted_prices = diag_prices.db_futures_adjusted_prices_data csv_roll_calendar = csvRollCalendarData() csv_multiple = csvFuturesMultiplePricesData() diff --git a/sysinit/futures/contract_prices_from_csv_to_arctic.py b/sysinit/futures/contract_prices_from_csv_to_arctic.py index e9c30f9f0a..ebc80df75e 100644 --- a/sysinit/futures/contract_prices_from_csv_to_arctic.py +++ b/sysinit/futures/contract_prices_from_csv_to_arctic.py @@ -1,33 +1,34 @@ from syscore.constants import arg_not_supplied from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData -from sysdata.pointers import parquet_futures_contract_price_data +from sysproduction.data.prices import diagPrices from sysobjects.contracts import futuresContract +diag_prices = diagPrices() -def init_arctic_with_csv_futures_contract_prices( +def init_db_with_csv_futures_contract_prices( datapath: str, csv_config=arg_not_supplied ): csv_prices = csvFuturesContractPriceData(datapath) input( - "WARNING THIS WILL ERASE ANY EXISTING ARCTIC PRICES WITH DATA FROM %s ARE YOU SURE?! (CTRL-C TO STOP)" + "WARNING THIS WILL ERASE ANY EXISTING DATABASE PRICES WITH DATA FROM %s ARE YOU SURE?! (CTRL-C TO STOP)" % csv_prices.datapath ) instrument_codes = csv_prices.get_list_of_instrument_codes_with_merged_price_data() instrument_codes.sort() for instrument_code in instrument_codes: - init_arctic_with_csv_futures_contract_prices_for_code( + init_db_with_csv_futures_contract_prices_for_code( instrument_code, datapath, csv_config=csv_config ) -def init_arctic_with_csv_futures_contract_prices_for_code( +def init_db_with_csv_futures_contract_prices_for_code( instrument_code: str, datapath: str, csv_config=arg_not_supplied ): print(instrument_code) csv_prices = csvFuturesContractPriceData(datapath, config=csv_config) - arctic_prices = parquet_futures_contract_price_data + db_prices = diag_prices.db_futures_contract_price_data print("Getting .csv prices may take some time") csv_price_dict = csv_prices.get_merged_prices_for_instrument(instrument_code) @@ -40,12 +41,12 @@ def init_arctic_with_csv_futures_contract_prices_for_code( print(".csv prices are \n %s" % str(prices_for_contract)) contract = futuresContract(instrument_code, contract_date_str) print("Contract object is %s" % str(contract)) - print("Writing to arctic") - arctic_prices.write_merged_prices_for_contract_object( + print("Writing to db") + db_prices.write_merged_prices_for_contract_object( contract, prices_for_contract, ignore_duplication=True ) - print("Reading back prices from arctic to check") - written_prices = arctic_prices.get_merged_prices_for_contract_object(contract) + print("Reading back prices from db to check") + written_prices = db_prices.get_merged_prices_for_contract_object(contract) print("Read back prices are \n %s" % str(written_prices)) @@ -53,4 +54,4 @@ def init_arctic_with_csv_futures_contract_prices_for_code( input("Will overwrite existing prices are you sure?! CTL-C to abort") # modify flags as required datapath = "*** NEED TO DEFINE A DATAPATH***" - init_arctic_with_csv_futures_contract_prices(datapath) + init_db_with_csv_futures_contract_prices(datapath) diff --git a/sysinit/futures/create_hourly_and_daily.py b/sysinit/futures/create_hourly_and_daily.py index 6fb212f613..1d0fa8d2b5 100644 --- a/sysinit/futures/create_hourly_and_daily.py +++ b/sysinit/futures/create_hourly_and_daily.py @@ -3,10 +3,12 @@ closing_date_rows_in_pd_object, get_intraday_pdf_at_frequency, ) -from sysdata.pointers import parquet_futures_contract_price_data +from sysproduction.data.prices import diagPrices + +diag_prices = diagPrices() def write_split_data_for_instrument(instrument_code): - a = parquet_futures_contract_price_data + a = diag_prices.db_futures_contract_price_data list_of_contracts = a.contracts_with_merged_price_data_for_instrument_code( instrument_code ) @@ -38,7 +40,7 @@ def write_split_data_for_instrument(instrument_code): "This script will delete any existing hourly and daily data in parquet, and replace with hourly and data inferred from 'merged' (legacy) data. CTL-C to abort" ) - a = parquet_futures_contract_price_data + a = diag_prices.db_futures_contract_price_data instrument_list = a.get_list_of_instrument_codes_with_merged_price_data() for instrument_code in instrument_list: print(instrument_code) diff --git a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py index 507550eda4..4328689d1c 100644 --- a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py +++ b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py @@ -1,8 +1,10 @@ from syscore.constants import arg_not_supplied from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysdata.pointers import parquet_futures_adjusted_price_data + +from sysproduction.data.prices import diagPrices + +diag_prices = diagPrices() def init_arctic_with_csv_futures_contract_prices( multiple_price_datapath=arg_not_supplied, adj_price_datapath=arg_not_supplied @@ -30,18 +32,18 @@ def init_arctic_with_csv_prices_for_code( ): print(instrument_code) csv_mult_data = csvFuturesMultiplePricesData(multiple_price_datapath) - arctic_mult_data = arcticFuturesMultiplePricesData() + db_mult_data = diagPrices.db_futures_multiple_prices_data mult_prices = csv_mult_data.get_multiple_prices(instrument_code) - arctic_mult_data.add_multiple_prices( + db_mult_data.add_multiple_prices( instrument_code, mult_prices, ignore_duplication=True ) csv_adj_data = csvFuturesAdjustedPricesData(adj_price_datapath) - parquet_adj_data = parquet_futures_adjusted_price_data + db_adj_data = diagPrices.db_futures_adjusted_prices_data adj_prices = csv_adj_data.get_adjusted_prices(instrument_code) - parquet_adj_data.add_adjusted_prices( + db_adj_data.add_adjusted_prices( instrument_code, adj_prices, ignore_duplication=True ) diff --git a/sysinit/futures/repocsv_adjusted_prices.py b/sysinit/futures/repocsv_adjusted_prices.py index 353413951b..5bfb9372fb 100755 --- a/sysinit/futures/repocsv_adjusted_prices.py +++ b/sysinit/futures/repocsv_adjusted_prices.py @@ -2,12 +2,16 @@ Copy from csv repo files to arctic for adjusted prices """ from syscore.constants import arg_not_supplied -from sysdata.pointers import parquet_futures_adjusted_price_data + +from sysproduction.data.prices import diagPrices + from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData if __name__ == "__main__": + diag_prices = diagPrices() + input("Will overwrite existing prices are you sure?! CTL-C to abort") - parquet_adjusted_prices = parquet_futures_adjusted_price_data + db_adjusted_prices = diag_prices.db_futures_adjusted_prices_data ## MODIFY PATH TO USE SOMETHING OTHER THAN DEFAULT csv_adj_datapath = arg_not_supplied @@ -26,6 +30,6 @@ print(adjusted_prices) - parquet_adjusted_prices.add_adjusted_prices( + db_adjusted_prices.add_adjusted_prices( instrument_code, adjusted_prices, ignore_duplication=True ) From 21bedad072ca6d514b9bba21e497e6343d56594f Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:38:35 +0000 Subject: [PATCH 098/235] removed all direct arctic references --- docs/data.md | 16 ++-- .../adjustedprices_from_db_multiple_to_db.py | 63 ++++++++++++++++ ...stedprices_from_mongo_multiple_to_mongo.py | 75 ------------------- ...from_db_prices_and_csv_calendars_to_db.py} | 67 ++++++----------- sysinit/futures/repocsv_multiple_prices.py | 10 ++- sysinit/futures/repocsv_spotfx_prices.py | 9 ++- ...> rollcalendars_from_db_multipleprices.py} | 12 ++- .../futures/safely_modify_roll_parameters.py | 22 ++---- ...potfx_from_csvAndInvestingDotCom_to_db.py} | 17 ++--- ...tic_to_csv.py => spotfx_from_db_to_csv.py} | 8 +- sysinit/transfer/backup_arctic_to_parquet.py | 4 +- 11 files changed, 137 insertions(+), 166 deletions(-) create mode 100755 sysinit/futures/adjustedprices_from_db_multiple_to_db.py delete mode 100755 sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py rename sysinit/futures/{multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py => multipleprices_from_db_prices_and_csv_calendars_to_db.py} (73%) rename sysinit/futures/{rollcalendars_from_mongodb_multipleprices.py => rollcalendars_from_db_multipleprices.py} (77%) rename sysinit/futures/{spotfx_from_csvAndInvestingDotCom_to_arctic.py => spotfx_from_csvAndInvestingDotCom_to_db.py} (83%) rename sysinit/futures/{spotfx_from_arctic_to_csv.py => spotfx_from_db_to_csv.py} (73%) diff --git a/docs/data.md b/docs/data.md index e513450157..6f8472c074 100644 --- a/docs/data.md +++ b/docs/data.md @@ -416,7 +416,7 @@ The next stage is to create and store *multiple prices*. Multiple prices are the ### Creating multiple prices from contract prices -The [relevant script is here](/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py). +The [relevant script is here](/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py). The script should be reasonably self explanatory in terms of data pipelines, but it's worth briefly reviewing what it does: @@ -471,12 +471,14 @@ build_and_write_roll_calendar(instrument_code, output_datapath=roll_calendars_from_arctic) ``` We use our updated prices and the roll calendar just built to [calculate multiple prices](#/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic): + ```python -from sysinit.futures.multipleprices_from_arcticprices_and_csv_calendars_to_arctic import process_multiple_prices_single_instrument +from sysinit.futures.multipleprices_from_db_prices_and_csv_calendars_to_db import + process_multiple_prices_single_instrument -process_multiple_prices_single_instrument(instrument_code, - csv_multiple_data_path=multiple_prices_from_arctic, ADD_TO_ARCTIC=False, - csv_roll_data_path=roll_calendars_from_arctic, ADD_TO_CSV=True) +process_multiple_prices_single_instrument(instrument_code, + csv_multiple_data_path=multiple_prices_from_arctic, ADD_TO_ARCTIC=False, + csv_roll_data_path=roll_calendars_from_arctic, ADD_TO_CSV=True) ``` ...which we splice onto the repo data (checking that the price and forward contracts match): @@ -521,7 +523,7 @@ init_arctic_with_csv_prices_for_code(instrument_code, multiple_price_datapath=sp ## Creating and storing back adjusted prices -Once we have multiple prices we can then create a backadjusted price series. The [relevant script](/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py) will read multiple prices from Arctic, do the backadjustment, and then write the prices to Arctic (and optionally to .csv if you want to use that for backup or simulation purposes). It's easy to modify this to read/write to/from different sources. +Once we have multiple prices we can then create a backadjusted price series. The [relevant script](/sysinit/futures/adjustedprices_from_db_multiple_to_db.py) will read multiple prices from Arctic, do the backadjustment, and then write the prices to Arctic (and optionally to .csv if you want to use that for backup or simulation purposes). It's easy to modify this to read/write to/from different sources. ### Changing the stitching method @@ -544,7 +546,7 @@ data=csvFxPricesData() data.get_fx_prices("GBPUSD") ``` -Save the files in a directory with no other content, using the filename format "GBPUSD.csv". Using [this simple script](/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_arctic.py) they are written to Arctic and/or .csv files. You will need to modify the script to point to the right directory, and you can also change the column and formatting parameters to use data from other sources. +Save the files in a directory with no other content, using the filename format "GBPUSD.csv". Using [this simple script](/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py) they are written to Arctic and/or .csv files. You will need to modify the script to point to the right directory, and you can also change the column and formatting parameters to use data from other sources. You can also run the script with `ADD_EXTRA_DATA = False, ADD_TO_CSV = True`. Then it will just do a straight copy from provided .csv data to Arctic. Your data will be stale, but in production it will automatically be updated with data from IB (as long as the provided data isn't more than a year out of date, since IB will give you only a year of daily prices). diff --git a/sysinit/futures/adjustedprices_from_db_multiple_to_db.py b/sysinit/futures/adjustedprices_from_db_multiple_to_db.py new file mode 100755 index 0000000000..e3e66b2e12 --- /dev/null +++ b/sysinit/futures/adjustedprices_from_db_multiple_to_db.py @@ -0,0 +1,63 @@ +""" +We create adjusted prices using multiple prices stored in database + +We then store those adjusted prices in database and/or csv + +""" +from syscore.constants import arg_not_supplied +from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData + +from sysobjects.adjusted_prices import futuresAdjustedPrices + +from sysproduction.data.prices import diagPrices + +diag_prices = diagPrices() + +def _get_data_inputs(csv_adj_data_path): + db_multiple_prices = diag_prices.db_futures_multiple_prices_data + db_adjusted_prices = diag_prices.db_futures_adjusted_prices_data + csv_adjusted_prices = csvFuturesAdjustedPricesData(csv_adj_data_path) + + return db_multiple_prices, db_adjusted_prices, csv_adjusted_prices + + +def process_adjusted_prices_all_instruments(csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): + db_multiple_prices, _notused, _alsonotused = _get_data_inputs(csv_adj_data_path) + instrument_list = db_multiple_prices.get_list_of_instruments() + for instrument_code in instrument_list: + print(instrument_code) + process_adjusted_prices_single_instrument(instrument_code, csv_adj_data_path=csv_adj_data_path, + ADD_TO_DB=ADD_TO_DB, ADD_TO_CSV=ADD_TO_CSV) + + +def process_adjusted_prices_single_instrument(instrument_code, csv_adj_data_path=arg_not_supplied, + multiple_prices=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): + ( + arctic_multiple_prices, + parquet_adjusted_prices, + csv_adjusted_prices, + ) = _get_data_inputs(csv_adj_data_path) + if multiple_prices is arg_not_supplied: + multiple_prices = arctic_multiple_prices.get_multiple_prices(instrument_code) + adjusted_prices = futuresAdjustedPrices.stitch_multiple_prices( + multiple_prices, forward_fill=True + ) + + print(adjusted_prices) + + if ADD_TO_DB: + parquet_adjusted_prices.add_adjusted_prices( + instrument_code, adjusted_prices, ignore_duplication=True + ) + if ADD_TO_CSV: + csv_adjusted_prices.add_adjusted_prices( + instrument_code, adjusted_prices, ignore_duplication=True + ) + + return adjusted_prices + + +if __name__ == "__main__": + input("Will overwrite existing prices are you sure?! CTL-C to abort") + # modify flags and datapath as required + process_adjusted_prices_all_instruments(csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=True) diff --git a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py b/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py deleted file mode 100755 index 96663744ef..0000000000 --- a/sysinit/futures/adjustedprices_from_mongo_multiple_to_mongo.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -We create adjusted prices using multiple prices stored in arctic - -We then store those adjusted prices in arctic and/or csv - -""" -from syscore.constants import arg_not_supplied -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA -from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData - -from sysobjects.adjusted_prices import futuresAdjustedPrices - - -def _get_data_inputs(csv_adj_data_path): - arctic_multiple_prices = arcticFuturesMultiplePricesData() - parquet_adjusted_prices = get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA) - csv_adjusted_prices = csvFuturesAdjustedPricesData(csv_adj_data_path) - - return arctic_multiple_prices, parquet_adjusted_prices, csv_adjusted_prices - - -def process_adjusted_prices_all_instruments( - csv_adj_data_path=arg_not_supplied, ADD_TO_ARCTIC=True, ADD_TO_CSV=False -): - arctic_multiple_prices, _notused, _alsonotused = _get_data_inputs(csv_adj_data_path) - instrument_list = arctic_multiple_prices.get_list_of_instruments() - for instrument_code in instrument_list: - print(instrument_code) - process_adjusted_prices_single_instrument( - instrument_code, - csv_adj_data_path=csv_adj_data_path, - ADD_TO_ARCTIC=ADD_TO_ARCTIC, - ADD_TO_CSV=ADD_TO_CSV, - ) - - -def process_adjusted_prices_single_instrument( - instrument_code, - csv_adj_data_path=arg_not_supplied, - multiple_prices=arg_not_supplied, - ADD_TO_ARCTIC=True, - ADD_TO_CSV=False, -): - ( - arctic_multiple_prices, - parquet_adjusted_prices, - csv_adjusted_prices, - ) = _get_data_inputs(csv_adj_data_path) - if multiple_prices is arg_not_supplied: - multiple_prices = arctic_multiple_prices.get_multiple_prices(instrument_code) - adjusted_prices = futuresAdjustedPrices.stitch_multiple_prices( - multiple_prices, forward_fill=True - ) - - print(adjusted_prices) - - if ADD_TO_ARCTIC: - parquet_adjusted_prices.add_adjusted_prices( - instrument_code, adjusted_prices, ignore_duplication=True - ) - if ADD_TO_CSV: - csv_adjusted_prices.add_adjusted_prices( - instrument_code, adjusted_prices, ignore_duplication=True - ) - - return adjusted_prices - - -if __name__ == "__main__": - input("Will overwrite existing prices are you sure?! CTL-C to abort") - # modify flags and datapath as required - process_adjusted_prices_all_instruments( - ADD_TO_ARCTIC=True, ADD_TO_CSV=True, csv_adj_data_path=arg_not_supplied - ) diff --git a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py b/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py similarity index 73% rename from sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py rename to sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py index a3b9cc5023..95fd700993 100755 --- a/sysinit/futures/multipleprices_from_arcticprices_and_csv_calendars_to_arctic.py +++ b/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py @@ -24,80 +24,63 @@ from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_roll_parameters import csvRollParametersData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysinit.futures.build_roll_calendars import adjust_to_price_series from sysobjects.multiple_prices import futuresMultiplePrices from sysdata.data_blob import dataBlob diag_prices = diagPrices() -parquet_futures_contract_price_data = diag_prices.db_futures_contract_price_data def _get_data_inputs(csv_roll_data_path, csv_multiple_data_path): csv_roll_calendars = csvRollCalendarData(csv_roll_data_path) - arctic_individual_futures_prices = parquet_futures_contract_price_data - arctic_multiple_prices = arcticFuturesMultiplePricesData() + db_individual_futures_prices = diag_prices.db_futures_contract_price_data + db_multiple_prices = diag_prices.db_futures_multiple_prices_data csv_multiple_prices = csvFuturesMultiplePricesData(csv_multiple_data_path) return ( csv_roll_calendars, - arctic_individual_futures_prices, - arctic_multiple_prices, + db_individual_futures_prices, + db_multiple_prices, csv_multiple_prices, ) -def process_multiple_prices_all_instruments( - csv_multiple_data_path=arg_not_supplied, - csv_roll_data_path=arg_not_supplied, - ADD_TO_ARCTIC=True, - ADD_TO_CSV=False, -): +def process_multiple_prices_all_instruments(csv_multiple_data_path=arg_not_supplied, + csv_roll_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): ( _not_used1, - arctic_individual_futures_prices, + db_individual_futures_prices, _not_used2, _not_used3, ) = _get_data_inputs(csv_roll_data_path, csv_multiple_data_path) instrument_list = ( - arctic_individual_futures_prices.get_list_of_instrument_codes_with_merged_price_data() + db_individual_futures_prices.get_list_of_instrument_codes_with_merged_price_data() ) for instrument_code in instrument_list: print(instrument_code) - process_multiple_prices_single_instrument( - instrument_code, - csv_multiple_data_path=csv_multiple_data_path, - csv_roll_data_path=csv_roll_data_path, - ADD_TO_ARCTIC=ADD_TO_ARCTIC, - ADD_TO_CSV=ADD_TO_CSV, - ) + process_multiple_prices_single_instrument(instrument_code, csv_multiple_data_path=csv_multiple_data_path, + csv_roll_data_path=csv_roll_data_path, ADD_TO_DB=ADD_TO_DB, + ADD_TO_CSV=ADD_TO_CSV) -def process_multiple_prices_single_instrument( - instrument_code, - target_instrument_code=arg_not_supplied, - adjust_calendar_to_prices=True, - csv_multiple_data_path=arg_not_supplied, - csv_roll_data_path=arg_not_supplied, - roll_parameters=arg_not_supplied, - roll_calendar=arg_not_supplied, - ADD_TO_ARCTIC=True, - ADD_TO_CSV=False, -): +def process_multiple_prices_single_instrument(instrument_code, target_instrument_code=arg_not_supplied, + adjust_calendar_to_prices=True, csv_multiple_data_path=arg_not_supplied, + csv_roll_data_path=arg_not_supplied, roll_parameters=arg_not_supplied, + roll_calendar=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): if target_instrument_code is arg_not_supplied: target_instrument_code = instrument_code ( csv_roll_calendars, - arctic_individual_futures_prices, - arctic_multiple_prices, + db_individual_futures_prices, + db_multiple_prices, csv_multiple_prices, ) = _get_data_inputs(csv_roll_data_path, csv_multiple_data_path) dict_of_futures_contract_prices = ( - arctic_individual_futures_prices.get_merged_prices_for_instrument( + db_individual_futures_prices.get_merged_prices_for_instrument( instrument_code ) ) @@ -131,8 +114,8 @@ def process_multiple_prices_single_instrument( print(multiple_prices) - if ADD_TO_ARCTIC: - arctic_multiple_prices.add_multiple_prices( + if ADD_TO_DB: + db_multiple_prices.add_multiple_prices( target_instrument_code, multiple_prices, ignore_duplication=True ) if ADD_TO_CSV: @@ -144,9 +127,9 @@ def process_multiple_prices_single_instrument( def adjust_roll_calendar(instrument_code, roll_calendar): - arctic_prices_per_contract = parquet_futures_contract_price_data + db_prices_per_contract = diag_prices.db_futures_contract_price_data print("Getting prices to adjust roll calendar") - dict_of_prices = arctic_prices_per_contract.get_merged_prices_for_instrument( + dict_of_prices = db_prices_per_contract.get_merged_prices_for_instrument( instrument_code ) dict_of_futures_contract_prices = dict_of_prices.final_prices() @@ -203,7 +186,5 @@ def add_phantom_row( csv_roll_data_path = arg_not_supplied # modify flags as required - process_multiple_prices_all_instruments( - csv_multiple_data_path=csv_multiple_data_path, - csv_roll_data_path=csv_roll_data_path, - ) + process_multiple_prices_all_instruments(csv_multiple_data_path=csv_multiple_data_path, + csv_roll_data_path=csv_roll_data_path) diff --git a/sysinit/futures/repocsv_multiple_prices.py b/sysinit/futures/repocsv_multiple_prices.py index 52187d13b6..11c4c53954 100755 --- a/sysinit/futures/repocsv_multiple_prices.py +++ b/sysinit/futures/repocsv_multiple_prices.py @@ -1,14 +1,16 @@ """ -Copy from csv repo files to arctic for multiple prices +Copy from csv repo files to db for multiple prices """ from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData +from sysproduction.data.prices import diagPrices + if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort") + diag_prices = diagPrices() - arctic_multiple_prices = arcticFuturesMultiplePricesData() + db_multiple_prices = diag_prices.db_futures_multiple_prices_data csv_multiple_prices = csvFuturesMultiplePricesData() instrument_code = input("Instrument code? ") @@ -23,6 +25,6 @@ print(multiple_prices) - arctic_multiple_prices.add_multiple_prices( + db_multiple_prices.add_multiple_prices( instrument_code, multiple_prices, ignore_duplication=True ) diff --git a/sysinit/futures/repocsv_spotfx_prices.py b/sysinit/futures/repocsv_spotfx_prices.py index 04f29d1906..fa205508f8 100755 --- a/sysinit/futures/repocsv_spotfx_prices.py +++ b/sysinit/futures/repocsv_spotfx_prices.py @@ -3,13 +3,14 @@ WARNING WILL OVERWRITE EXISTING! """ -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_spot_fx import csvFxPricesData +from sysproduction.data.currency_data import fxPricesData + +db_fx_price_data = fxPricesData() if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort") - arctic_fx_prices = arcticFxPricesData() csv_fx_prices = csvFxPricesData() currency_code = input("Currency code? ") @@ -22,6 +23,6 @@ fx_prices = csv_fx_prices.get_fx_prices(currency_code) print(fx_prices) - arctic_fx_prices.add_fx_prices( - currency_code, fx_prices, ignore_duplication=True + db_fx_price_data.add_fx_prices( + code=currency_code, fx_price_data=fx_prices, ignore_duplication=True ) diff --git a/sysinit/futures/rollcalendars_from_mongodb_multipleprices.py b/sysinit/futures/rollcalendars_from_db_multipleprices.py similarity index 77% rename from sysinit/futures/rollcalendars_from_mongodb_multipleprices.py rename to sysinit/futures/rollcalendars_from_db_multipleprices.py index 5365f553ed..65fc7be8ab 100644 --- a/sysinit/futures/rollcalendars_from_mongodb_multipleprices.py +++ b/sysinit/futures/rollcalendars_from_db_multipleprices.py @@ -2,7 +2,9 @@ from sysobjects.roll_calendars import rollCalendar from sysdata.csv.csv_roll_calendars import csvRollCalendarData from sysdata.csv.csv_roll_parameters import csvRollParametersData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData + +from sysproduction.data.prices import diagPrices + """ Generate the roll calendars from existing data @@ -11,16 +13,18 @@ if __name__ == "__main__": input("Will overwrite existing data are you sure?! CTL-C to abort") + diag_prices = diagPrices() + output_datapath = arg_not_supplied csv_roll_calendars = csvRollCalendarData(arg_not_supplied) csv_rollparameters = csvRollParametersData() - arctic_multiple_prices = arcticFuturesMultiplePricesData() + db_multiple_prices = diag_prices.db_futures_multiple_prices_data - instrument_list = arctic_multiple_prices.get_list_of_instruments() + instrument_list = db_multiple_prices.get_list_of_instruments() for instrument_code in instrument_list: print(instrument_code) - multiple_prices = arctic_multiple_prices.get_multiple_prices(instrument_code) + multiple_prices = db_multiple_prices.get_multiple_prices(instrument_code) roll_parameters = csv_rollparameters.get_roll_parameters(instrument_code) roll_calendar = rollCalendar.back_out_from_multiple_prices(multiple_prices) diff --git a/sysinit/futures/safely_modify_roll_parameters.py b/sysinit/futures/safely_modify_roll_parameters.py index 9d6463e219..e95ba8b10d 100644 --- a/sysinit/futures/safely_modify_roll_parameters.py +++ b/sysinit/futures/safely_modify_roll_parameters.py @@ -11,10 +11,10 @@ from sysinit.futures.rollcalendars_from_arcticprices_to_csv import ( build_and_write_roll_calendar, ) -from sysinit.futures.multipleprices_from_arcticprices_and_csv_calendars_to_arctic import ( +from sysinit.futures.multipleprices_from_db_prices_and_csv_calendars_to_db import ( process_multiple_prices_single_instrument, ) -from sysinit.futures.adjustedprices_from_mongo_multiple_to_mongo import ( +from sysinit.futures.adjustedprices_from_db_multiple_to_db import ( process_adjusted_prices_single_instrument, ) from sysobjects.rolls import rollParameters @@ -55,18 +55,12 @@ def safely_modify_roll_parameters(data: dataBlob): print("Doing nothing") # return None - new_multiple_prices = process_multiple_prices_single_instrument( - instrument_code=instrument_code, - csv_roll_data_path=output_path_for_temp_csv_files, - ADD_TO_CSV=False, - ADD_TO_ARCTIC=False, - ) - new_adjusted_prices = process_adjusted_prices_single_instrument( - instrument_code, - multiple_prices=new_multiple_prices, - ADD_TO_CSV=False, - ADD_TO_ARCTIC=False, - ) + new_multiple_prices = process_multiple_prices_single_instrument(instrument_code=instrument_code, + csv_roll_data_path=output_path_for_temp_csv_files, + ADD_TO_DB=False, ADD_TO_CSV=False) + new_adjusted_prices = process_adjusted_prices_single_instrument(instrument_code, + multiple_prices=new_multiple_prices, + ADD_TO_DB=False, ADD_TO_CSV=False) diag_prices = diagPrices(data) existing_multiple_prices = diag_prices.get_multiple_prices(instrument_code) diff --git a/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_arctic.py b/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py similarity index 83% rename from sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_arctic.py rename to sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py index 01fe58aaad..dfc74d0c41 100644 --- a/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_arctic.py +++ b/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py @@ -1,30 +1,27 @@ """ Get FX prices from investing.com files, and from csv, merge and write to Arctic and/or optionally overwrite csv files """ -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData + from sysdata.csv.csv_spot_fx import csvFxPricesData, ConfigCsvFXPrices import pandas as pd - +from sysproduction.data.currency_data import fxPricesData # You may need to change this! # There must be ONLY fx prices here, with filenames "GBPUSD.csv" etc +db_fx_prices_data = fxPricesData() investing_dot_com_config = ConfigCsvFXPrices( price_column="Close", date_column="Date Time", date_format="%Y-%m-%d" ) -def spotfx_from_csv_and_investing_dot_com( - datapath, ADD_TO_ARCTIC=True, ADD_TO_CSV=True, ADD_EXTRA_DATA=True -): +def spotfx_from_csv_and_investing_dot_com(datapath, ADD_TO_DB=True, ADD_TO_CSV=True, ADD_EXTRA_DATA=True): # You can adapt this for different providers by changing these parameters if ADD_EXTRA_DATA: investingDotCom_csv_fx_prices = csvFxPricesData( datapath=datapath, config=investing_dot_com_config ) - if ADD_TO_ARCTIC: - arctic_fx_prices = arcticFxPricesData() my_csv_fx_prices_data = csvFxPricesData() list_of_ccy_codes = my_csv_fx_prices_data.get_list_of_fxcodes() @@ -60,7 +57,7 @@ def spotfx_from_csv_and_investing_dot_com( currency_code, fx_prices, ignore_duplication=True ) - if ADD_TO_ARCTIC: - arctic_fx_prices.add_fx_prices( - currency_code, fx_prices, ignore_duplication=True + if ADD_TO_DB: + db_fx_prices_data.add_fx_prices( + code=currency_code, fx_price_data=fx_prices, ignore_duplication=True ) diff --git a/sysinit/futures/spotfx_from_arctic_to_csv.py b/sysinit/futures/spotfx_from_db_to_csv.py similarity index 73% rename from sysinit/futures/spotfx_from_arctic_to_csv.py rename to sysinit/futures/spotfx_from_db_to_csv.py index 858b219147..be557ca985 100644 --- a/sysinit/futures/spotfx_from_arctic_to_csv.py +++ b/sysinit/futures/spotfx_from_db_to_csv.py @@ -3,18 +3,20 @@ WARNING WILL OVERWRITE EXISTING! """ -from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData +from sysproduction.data.currency_data import fxPricesData from sysdata.csv.csv_spot_fx import csvFxPricesData + +db_fx_prices_data = fxPricesData() + if __name__ == "__main__": input("Will overwrite existing data are you sure?! CTL-C to abort") - arctic_fx_prices = arcticFxPricesData() csv_fx_prices = csvFxPricesData() list_of_ccy_codes = csv_fx_prices.get_list_of_fxcodes() for currency_code in list_of_ccy_codes: - fx_prices = arctic_fx_prices.get_fx_prices(currency_code) + fx_prices = db_fx_prices_data.get_fx_prices(currency_code) print(fx_prices) csv_fx_prices.add_fx_prices(currency_code, fx_prices, ignore_duplication=True) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index ff2e878ed7..6d62928eb8 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -229,9 +229,9 @@ def backup_futures_contract_prices_for_contract_to_parquet( # fx def backup_fx_to_csv(data): - fx_codes = data.arctic_fx_prices.get_list_of_fxcodes() + fx_codes = data.db_fx_prices.get_list_of_fxcodes() for fx_code in fx_codes: - arctic_data = data.arctic_fx_prices.get_fx_prices(fx_code) + arctic_data = data.db_fx_prices.get_fx_prices(fx_code) csv_data = data.csv_fx_prices.get_fx_prices(fx_code) if check_ts_equals(arctic_data, csv_data): data.log.debug("No fx backup needed for %s" % fx_code) From 617240cb7d5ee42b263da5b3bf573d7cbdbe53c1 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:40:38 +0000 Subject: [PATCH 099/235] fixed backup --- sysproduction/backup_arctic_to_csv.py | 4 ++-- sysproduction/run_backups.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_arctic_to_csv.py index 6162505051..ce177d6d3b 100644 --- a/sysproduction/backup_arctic_to_csv.py +++ b/sysproduction/backup_arctic_to_csv.py @@ -35,7 +35,7 @@ def backup_db_to_csv(): data = dataBlob(log_name="backup_db_to_csv") - backup_object = backupArcticToCsv(data) + backup_object = backupDbToCsv(data) backup_object.backup_db_to_csv() return None @@ -48,7 +48,7 @@ def quick_backup_of_all_price_data_including_expired(): backup_futures_contract_prices_to_csv(backup_data, ignore_long_expired=False) -class backupArcticToCsv: +class backupDbToCsv: def __init__(self, data): self.data = data diff --git a/sysproduction/run_backups.py b/sysproduction/run_backups.py index 859e4fd46d..ef14fa9313 100644 --- a/sysproduction/run_backups.py +++ b/sysproduction/run_backups.py @@ -1,5 +1,5 @@ from syscontrol.run_process import processToRun -from sysproduction.backup_arctic_to_csv import backupArcticToCsv +from sysproduction.backup_arctic_to_csv import backupDbToCsv from sysproduction.backup_mongo_data_as_dump import backupMongo from sysproduction.backup_state_files import backupStateFiles from sysdata.data_blob import dataBlob @@ -14,16 +14,16 @@ def run_backups(): def get_list_of_timer_functions_for_backup(): - data_arctic_backups = dataBlob(log_name="backup_arctic_to_csv") + data_db_backups = dataBlob(log_name="backup_db_to_csv") data_state_files = dataBlob(log_name="backup_files") data_mongo_dump = dataBlob(log_name="backup_mongo_data_as_dump") - arctic_backup_object = backupArcticToCsv(data_arctic_backups) + db_backup_object = backupDbToCsv(data_db_backups) statefile_backup_object = backupStateFiles(data_state_files) mongodump_backup_object = backupMongo(data_mongo_dump) list_of_timer_names_and_functions = [ - ("backup_arctic_to_csv", arctic_backup_object), + ("backup_db_to_csv", db_backup_object), ("backup_mongo_data_as_dump", mongodump_backup_object), ("backup_files", statefile_backup_object), ] From e79867de95576cf49cda5432139f39435276f3e2 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:43:38 +0000 Subject: [PATCH 100/235] fixed backup --- docs/production.md | 4 +++- .../{backup_arctic_to_csv.py => backup_db_to_csv.py} | 0 sysproduction/interactive_controls.py | 2 +- sysproduction/linux/scripts/backup_arctic_to_csv | 3 --- sysproduction/linux/scripts/backup_db_to_csv | 3 +++ sysproduction/run_backups.py | 2 +- 6 files changed, 8 insertions(+), 6 deletions(-) rename sysproduction/{backup_arctic_to_csv.py => backup_db_to_csv.py} (100%) delete mode 100755 sysproduction/linux/scripts/backup_arctic_to_csv create mode 100755 sysproduction/linux/scripts/backup_db_to_csv diff --git a/docs/production.md b/docs/production.md index edc6e3eb62..12c7e28197 100644 --- a/docs/production.md +++ b/docs/production.md @@ -2401,8 +2401,10 @@ Every day we generate echo files with extension .txt; this process renames ones ### Backup Arctic data to .csv files Python: + ```python -from sysproduction.backup_arctic_to_csv import backup_arctic_to_csv +from sysproduction.backup_db_to_csv import backup_arctic_to_csv + backup_arctic_to_csv() ``` diff --git a/sysproduction/backup_arctic_to_csv.py b/sysproduction/backup_db_to_csv.py similarity index 100% rename from sysproduction/backup_arctic_to_csv.py rename to sysproduction/backup_db_to_csv.py diff --git a/sysproduction/interactive_controls.py b/sysproduction/interactive_controls.py index 7b79fc5b49..ca097d4ba8 100644 --- a/sysproduction/interactive_controls.py +++ b/sysproduction/interactive_controls.py @@ -26,7 +26,7 @@ from sysobjects.production.process_control import processNotRunning from sysobjects.production.tradeable_object import instrumentStrategy -from sysproduction.backup_arctic_to_csv import ( +from sysproduction.backup_db_to_csv import ( backup_spread_cost_data, get_data_and_create_csv_directories, ) diff --git a/sysproduction/linux/scripts/backup_arctic_to_csv b/sysproduction/linux/scripts/backup_arctic_to_csv deleted file mode 100755 index 85fe033e76..0000000000 --- a/sysproduction/linux/scripts/backup_arctic_to_csv +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -. ~/.profile -. p sysproduction.backup_arctic_to_csv.backup_arctic_to_csv \ No newline at end of file diff --git a/sysproduction/linux/scripts/backup_db_to_csv b/sysproduction/linux/scripts/backup_db_to_csv new file mode 100755 index 0000000000..d628effc0b --- /dev/null +++ b/sysproduction/linux/scripts/backup_db_to_csv @@ -0,0 +1,3 @@ +#!/bin/bash +. ~/.profile +. p sysproduction.backup_db_to_csv.backup_db_to_csv \ No newline at end of file diff --git a/sysproduction/run_backups.py b/sysproduction/run_backups.py index ef14fa9313..5a53e7acf3 100644 --- a/sysproduction/run_backups.py +++ b/sysproduction/run_backups.py @@ -1,5 +1,5 @@ from syscontrol.run_process import processToRun -from sysproduction.backup_arctic_to_csv import backupDbToCsv +from sysproduction.backup_db_to_csv import backupDbToCsv from sysproduction.backup_mongo_data_as_dump import backupMongo from sysproduction.backup_state_files import backupStateFiles from sysdata.data_blob import dataBlob From db1e3b687115dbeeffcd96fd82c584128cecde95 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:51:54 +0000 Subject: [PATCH 101/235] fix for class adding --- sysdata/data_blob.py | 8 +-- sysproduction/backup_db_to_csv.py | 94 +++++++++++++++---------------- 2 files changed, 51 insertions(+), 51 deletions(-) diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index ba095d0b3d..f2e6e70509 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -86,10 +86,10 @@ def add_class_list(self, class_list: list, use_prefix: str = arg_not_supplied): def add_class_object(self, class_object, use_prefix: str = arg_not_supplied): class_name = get_class_name(class_object) - attr_name = self._get_new_name(class_name, use_prefix=use_prefix) - if not self._already_existing_class_name(attr_name): + new_name = self._get_new_name(class_name, use_prefix=use_prefix) + if not self._already_existing_class_name(new_name): resolved_instance = self._get_resolved_instance_of_class(class_object) - self._resolve_names_and_add(resolved_instance, class_name) + self._add_new_class_with_new_name(resolved_instance=resolved_instance, attr_name=new_name) def _get_resolved_instance_of_class(self, class_object): class_adding_method = self._get_class_adding_method(class_object) @@ -232,7 +232,7 @@ def _get_specific_logger(self, class_object): return log - def _resolve_names_and_add(self, resolved_instance, class_name: str): + def _resolve_names_and_add(self, resolved_instance, new_name: str): attr_name = self._get_new_name(class_name) self._add_new_class_with_new_name(resolved_instance, attr_name) diff --git a/sysproduction/backup_db_to_csv.py b/sysproduction/backup_db_to_csv.py index ce177d6d3b..296dc1f492 100644 --- a/sysproduction/backup_db_to_csv.py +++ b/sysproduction/backup_db_to_csv.py @@ -76,56 +76,56 @@ def backup_db_to_csv(self): def get_data_and_create_csv_directories(logname): - csv_dump_dir = get_csv_dump_dir() - - class_paths = dict( - csvBrokerHistoricOrdersData="broker_orders", - csvCapitalData="capital", - csvContractHistoricOrdersData="contract_orders", - csvContractPositionData="contract_positions", - csvFuturesAdjustedPricesData="adjusted_prices", - csvFuturesContractData="contracts_data", - csvFuturesContractPriceData="contract_prices", - csvFuturesMultiplePricesData="multiple_prices", - csvFxPricesData="fx_prices", - csvOptimalPositionData="optimal_positions", - csvRollStateData="roll_state", - csvSpreadCostData="spread_costs", - csvSpreadsForInstrumentData="spreads", - csvStrategyHistoricOrdersData="strategy_orders", - csvStrategyPositionData="strategy_positions", - ) +csv_dump_dir = get_csv_dump_dir() + +class_paths = dict( + csvBrokerHistoricOrdersData="broker_orders", + csvCapitalData="capital", + csvContractHistoricOrdersData="contract_orders", + csvContractPositionData="contract_positions", + csvFuturesAdjustedPricesData="adjusted_prices", + csvFuturesContractData="contracts_data", + csvFuturesContractPriceData="contract_prices", + csvFuturesMultiplePricesData="multiple_prices", + csvFxPricesData="fx_prices", + csvOptimalPositionData="optimal_positions", + csvRollStateData="roll_state", + csvSpreadCostData="spread_costs", + csvSpreadsForInstrumentData="spreads", + csvStrategyHistoricOrdersData="strategy_orders", + csvStrategyPositionData="strategy_positions", +) - for class_name, path in class_paths.items(): - dir_name = os.path.join(csv_dump_dir, path) - class_paths[class_name] = dir_name - if not os.path.exists(dir_name): - os.makedirs(dir_name) +for class_name, path in class_paths.items(): + dir_name = os.path.join(csv_dump_dir, path) + class_paths[class_name] = dir_name + if not os.path.exists(dir_name): + os.makedirs(dir_name) - data = dataBlob( - csv_data_paths=class_paths, log_name=logname - ) +data = dataBlob( + csv_data_paths=class_paths, log_name=logname +) - data.add_class_list( - [ - csvBrokerHistoricOrdersData, - csvCapitalData, - csvContractHistoricOrdersData, - csvContractPositionData, - csvFuturesAdjustedPricesData, - csvFuturesContractData, - csvFuturesContractPriceData, - csvFuturesMultiplePricesData, - csvFxPricesData, - csvOptimalPositionData, - csvRollStateData, - csvSpreadCostData, - csvSpreadsForInstrumentData, - csvStrategyHistoricOrdersData, - csvStrategyPositionData, - ] - , use_prefix="csv" - ) +data.add_class_list( + [ + csvBrokerHistoricOrdersData, + csvCapitalData, + csvContractHistoricOrdersData, + csvContractPositionData, + csvFuturesAdjustedPricesData, + csvFuturesContractData, + csvFuturesContractPriceData, + csvFuturesMultiplePricesData, + csvFxPricesData, + csvOptimalPositionData, + csvRollStateData, + csvSpreadCostData, + csvSpreadsForInstrumentData, + csvStrategyHistoricOrdersData, + csvStrategyPositionData, + ] + , use_prefix="csv" +) data.add_class_list( [ From 9c2f6fbcc60e07821f0593bfbae9e485cdfc8fa6 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:53:39 +0000 Subject: [PATCH 102/235] accident indent --- sysproduction/backup_db_to_csv.py | 138 +++++++++++++++--------------- 1 file changed, 69 insertions(+), 69 deletions(-) diff --git a/sysproduction/backup_db_to_csv.py b/sysproduction/backup_db_to_csv.py index 296dc1f492..b02329b32c 100644 --- a/sysproduction/backup_db_to_csv.py +++ b/sysproduction/backup_db_to_csv.py @@ -76,80 +76,80 @@ def backup_db_to_csv(self): def get_data_and_create_csv_directories(logname): -csv_dump_dir = get_csv_dump_dir() - -class_paths = dict( - csvBrokerHistoricOrdersData="broker_orders", - csvCapitalData="capital", - csvContractHistoricOrdersData="contract_orders", - csvContractPositionData="contract_positions", - csvFuturesAdjustedPricesData="adjusted_prices", - csvFuturesContractData="contracts_data", - csvFuturesContractPriceData="contract_prices", - csvFuturesMultiplePricesData="multiple_prices", - csvFxPricesData="fx_prices", - csvOptimalPositionData="optimal_positions", - csvRollStateData="roll_state", - csvSpreadCostData="spread_costs", - csvSpreadsForInstrumentData="spreads", - csvStrategyHistoricOrdersData="strategy_orders", - csvStrategyPositionData="strategy_positions", -) + csv_dump_dir = get_csv_dump_dir() + + class_paths = dict( + csvBrokerHistoricOrdersData="broker_orders", + csvCapitalData="capital", + csvContractHistoricOrdersData="contract_orders", + csvContractPositionData="contract_positions", + csvFuturesAdjustedPricesData="adjusted_prices", + csvFuturesContractData="contracts_data", + csvFuturesContractPriceData="contract_prices", + csvFuturesMultiplePricesData="multiple_prices", + csvFxPricesData="fx_prices", + csvOptimalPositionData="optimal_positions", + csvRollStateData="roll_state", + csvSpreadCostData="spread_costs", + csvSpreadsForInstrumentData="spreads", + csvStrategyHistoricOrdersData="strategy_orders", + csvStrategyPositionData="strategy_positions", + ) -for class_name, path in class_paths.items(): - dir_name = os.path.join(csv_dump_dir, path) - class_paths[class_name] = dir_name - if not os.path.exists(dir_name): - os.makedirs(dir_name) + for class_name, path in class_paths.items(): + dir_name = os.path.join(csv_dump_dir, path) + class_paths[class_name] = dir_name + if not os.path.exists(dir_name): + os.makedirs(dir_name) -data = dataBlob( - csv_data_paths=class_paths, log_name=logname -) + data = dataBlob( + csv_data_paths=class_paths, log_name=logname + ) -data.add_class_list( - [ - csvBrokerHistoricOrdersData, - csvCapitalData, - csvContractHistoricOrdersData, - csvContractPositionData, - csvFuturesAdjustedPricesData, - csvFuturesContractData, - csvFuturesContractPriceData, - csvFuturesMultiplePricesData, - csvFxPricesData, - csvOptimalPositionData, - csvRollStateData, - csvSpreadCostData, - csvSpreadsForInstrumentData, - csvStrategyHistoricOrdersData, - csvStrategyPositionData, - ] - , use_prefix="csv" -) + data.add_class_list( + [ + csvBrokerHistoricOrdersData, + csvCapitalData, + csvContractHistoricOrdersData, + csvContractPositionData, + csvFuturesAdjustedPricesData, + csvFuturesContractData, + csvFuturesContractPriceData, + csvFuturesMultiplePricesData, + csvFxPricesData, + csvOptimalPositionData, + csvRollStateData, + csvSpreadCostData, + csvSpreadsForInstrumentData, + csvStrategyHistoricOrdersData, + csvStrategyPositionData, + ] + , use_prefix="csv" + ) - data.add_class_list( - [ - get_class_for_data_type(CAPITAL_DATA), - get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), - get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), - get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), - get_class_for_data_type(FX_DATA), - get_class_for_data_type(SPREAD_DATA), - get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), - get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), - get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), - get_class_for_data_type(CONTRACT_POSITION_DATA), - get_class_for_data_type(STRATEGY_POSITION_DATA), - get_class_for_data_type(FUTURES_CONTRACT_DATA), - get_class_for_data_type(OPTIMAL_POSITION_DATA), - get_class_for_data_type(ROLL_STATE_DATA), - get_class_for_data_type(SPREAD_DATA) - - ], - use_prefix="db" - ) + data.add_class_list( + [ + get_class_for_data_type(CAPITAL_DATA), + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FX_DATA), + get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), + get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA), + get_class_for_data_type(OPTIMAL_POSITION_DATA), + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(SPREAD_DATA) + + ], + use_prefix="db" + ) - return data + return data # Write function for each thing we want to backup From 12d5221b3b8a951c818842337e7a27692f1d62dc Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 11:56:05 +0000 Subject: [PATCH 103/235] accident indent --- sysproduction/backup_db_to_csv.py | 84 +++++++++++++++---------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/sysproduction/backup_db_to_csv.py b/sysproduction/backup_db_to_csv.py index b02329b32c..fdbf29947a 100644 --- a/sysproduction/backup_db_to_csv.py +++ b/sysproduction/backup_db_to_csv.py @@ -106,50 +106,50 @@ def get_data_and_create_csv_directories(logname): csv_data_paths=class_paths, log_name=logname ) - data.add_class_list( - [ - csvBrokerHistoricOrdersData, - csvCapitalData, - csvContractHistoricOrdersData, - csvContractPositionData, - csvFuturesAdjustedPricesData, - csvFuturesContractData, - csvFuturesContractPriceData, - csvFuturesMultiplePricesData, - csvFxPricesData, - csvOptimalPositionData, - csvRollStateData, - csvSpreadCostData, - csvSpreadsForInstrumentData, - csvStrategyHistoricOrdersData, - csvStrategyPositionData, - ] - , use_prefix="csv" - ) + data.add_class_list( + [ + csvBrokerHistoricOrdersData, + csvCapitalData, + csvContractHistoricOrdersData, + csvContractPositionData, + csvFuturesAdjustedPricesData, + csvFuturesContractData, + csvFuturesContractPriceData, + csvFuturesMultiplePricesData, + csvFxPricesData, + csvOptimalPositionData, + csvRollStateData, + csvSpreadCostData, + csvSpreadsForInstrumentData, + csvStrategyHistoricOrdersData, + csvStrategyPositionData, + ] + , use_prefix="csv" + ) - data.add_class_list( - [ - get_class_for_data_type(CAPITAL_DATA), - get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), - get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), - get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), - get_class_for_data_type(FX_DATA), - get_class_for_data_type(SPREAD_DATA), - get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), - get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), - get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), - get_class_for_data_type(CONTRACT_POSITION_DATA), - get_class_for_data_type(STRATEGY_POSITION_DATA), - get_class_for_data_type(FUTURES_CONTRACT_DATA), - get_class_for_data_type(OPTIMAL_POSITION_DATA), - get_class_for_data_type(ROLL_STATE_DATA), - get_class_for_data_type(SPREAD_DATA) - - ], - use_prefix="db" - ) + data.add_class_list( + [ + get_class_for_data_type(CAPITAL_DATA), + get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), + get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), + get_class_for_data_type(FX_DATA), + get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), + get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(FUTURES_CONTRACT_DATA), + get_class_for_data_type(OPTIMAL_POSITION_DATA), + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(SPREAD_DATA) + + ], + use_prefix="db" + ) - return data + return data # Write function for each thing we want to backup From c0773c9cae7fded4bb64f3edda5e39d9c33ef27c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:05:01 +0000 Subject: [PATCH 104/235] added multiple price backup for parquet --- sysdata/parquet/parquet_multiple_prices.py | 84 ++++++++++++++++++++ sysinit/transfer/backup_arctic_to_parquet.py | 67 +++++++--------- 2 files changed, 114 insertions(+), 37 deletions(-) diff --git a/sysdata/parquet/parquet_multiple_prices.py b/sysdata/parquet/parquet_multiple_prices.py index e69de29bb2..b8e9b50b28 100644 --- a/sysdata/parquet/parquet_multiple_prices.py +++ b/sysdata/parquet/parquet_multiple_prices.py @@ -0,0 +1,84 @@ +""" +Read and write data from mongodb for 'multiple prices' + +""" +import pandas as pd +from sysdata.parquet.parquet_access import ParquetAccess +from sysdata.futures.multiple_prices import ( + futuresMultiplePricesData, +) +from sysobjects.multiple_prices import futuresMultiplePrices +from sysobjects.dict_of_named_futures_per_contract_prices import ( + list_of_price_column_names, + contract_name_from_column_name, +) +from syslogging.logger import * + +MULTIPLE_COLLECTION = "futures_multiple_prices" + + +class parquetFuturesMultiplePricesData(futuresMultiplePricesData): + """ + Class to read / write multiple futures price data to and from arctic + """ + + def __init__( + self,parquet_access: ParquetAccess, log=get_logger("parquetFuturesMultiplePricesData") + ): + + super().__init__(log=log) + self._parquet = parquet_access + + def __repr__(self): + return "parquetFuturesMultiplePricesData" + + @property + def parquet(self): + return self._parquet + + def get_list_of_instruments(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=MULTIPLE_COLLECTION) + + def _get_multiple_prices_without_checking( + self, instrument_code: str + ) -> futuresMultiplePrices: + data = self.parquet.read_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code) + + return futuresMultiplePrices(data) + + def _delete_multiple_prices_without_any_warning_be_careful( + self, instrument_code: str + ): + + self.parquet.delete_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code) + self.log.debug( + "Deleted multiple prices for %s from %s" % (instrument_code, str(self)) + ) + + def _add_multiple_prices_without_checking_for_existing_entry( + self, instrument_code: str, multiple_price_data_object: futuresMultiplePrices + ): + + multiple_price_data_aspd = pd.DataFrame(multiple_price_data_object) + multiple_price_data_aspd = _change_contracts_to_str(multiple_price_data_aspd) + + self.parquet.write_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code, data_to_write=multiple_price_data_aspd) + self.log.debug( + "Wrote %s lines of prices for %s to %s" + % (len(multiple_price_data_aspd), instrument_code, str(self)), + instrument_code=instrument_code, + ) + + +def _change_contracts_to_str(multiple_price_data_aspd): + for price_column in list_of_price_column_names: + multiple_price_data_aspd[price_column] = multiple_price_data_aspd[ + price_column + ].astype(float) + + contract_column = contract_name_from_column_name(price_column) + multiple_price_data_aspd[contract_column] = multiple_price_data_aspd[ + contract_column + ].astype(str) + + return multiple_price_data_aspd diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 6d62928eb8..1344ada068 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -11,9 +11,9 @@ from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_capital import parquetCapitalData from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData +from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.csv.csv_futures_contracts import csvFuturesContractData -from sysdata.csv.csv_multiple_prices import csvFuturesMultiplePricesData from sysdata.csv.csv_spot_fx import csvFxPricesData from sysdata.csv.csv_contract_position_data import csvContractPositionData from sysdata.csv.csv_strategy_position_data import csvStrategyPositionData @@ -64,7 +64,7 @@ def backup_arctic_to_parquet(): backup_futures_contract_prices_to_parquet(backup_data) #backup_spreads_to_csv(backup_data) #backup_fx_to_csv(backup_data) - #backup_multiple_to_csv(backup_data) + backup_multiple_to_parquet(backup_data) backup_adj_to_parquet(backup_data) #backup_strategy_position_data(backup_data) #backup_contract_position_data(backup_data) @@ -79,7 +79,7 @@ def backup_arctic_to_parquet(): def get_data_blob(logname): data = dataBlob( - keep_original_prefix=True, log_name=logname + log_name=logname ) data.add_class_list( @@ -91,7 +91,7 @@ def get_data_blob(logname): parquetFuturesAdjustedPricesData, #csvFuturesContractData, parquetFuturesContractPriceData, - #csvFuturesMultiplePricesData, + parquetFuturesMultiplePricesData, #csvFxPricesData, #csvOptimalPositionData, #csvRollStateData, @@ -99,7 +99,8 @@ def get_data_blob(logname): #csvSpreadsForInstrumentData, #csvStrategyHistoricOrdersData, #csvStrategyPositionData, - ] + ], + use_prefix='parquet' ) data.add_class_list( @@ -119,7 +120,7 @@ def get_data_blob(logname): mongoSpreadCostData, mongoStrategyHistoricOrdersData, arcticStrategyPositionData, - ] + ], ) return data @@ -227,6 +228,29 @@ def backup_futures_contract_prices_for_contract_to_parquet( ) +def backup_multiple_to_parquet(data): + instrument_list = data.arctic_futures_multiple_prices.get_list_of_instruments() + for instrument_code in instrument_list: + backup_multiple_to_parquet_for_instrument(data, instrument_code) + + +def backup_multiple_to_parquet_for_instrument(data, instrument_code: str): + arctic_data = data.arctic_futures_multiple_prices.get_multiple_prices( + instrument_code + ) + data.parquet_futures_multiple_prices.add_multiple_prices( + instrument_code, arctic_data, ignore_duplication=True + ) + new_data = data.parquet_futures_multiple_prices.get_multiple_prices( + instrument_code) + data.log.debug( + "Written .csv backup multiple prices for %s was %s now %s" % (instrument_code, + arctic_data, + new_data) + ) + + + # fx def backup_fx_to_csv(data): fx_codes = data.db_fx_prices.get_list_of_fxcodes() @@ -246,37 +270,6 @@ def backup_fx_to_csv(data): data.log.warning("Problem writing .csv backup for %s" % fx_code) -def backup_multiple_to_csv(data): - instrument_list = data.arctic_futures_multiple_prices.get_list_of_instruments() - for instrument_code in instrument_list: - backup_multiple_to_csv_for_instrument(data, instrument_code) - - -def backup_multiple_to_csv_for_instrument(data, instrument_code: str): - arctic_data = data.arctic_futures_multiple_prices.get_multiple_prices( - instrument_code - ) - csv_data = data.csv_futures_multiple_prices.get_multiple_prices(instrument_code) - - if check_df_equals(arctic_data, csv_data): - data.log.debug("No multiple prices backup needed for %s" % instrument_code) - pass - else: - try: - data.csv_futures_multiple_prices.add_multiple_prices( - instrument_code, arctic_data, ignore_duplication=True - ) - data.log.debug( - "Written .csv backup multiple prices for %s" % instrument_code - ) - except BaseException: - data.log.warning( - "Problem writing .csv backup multiple prices for %s" % instrument_code - ) - - - - def backup_spreads_to_csv(data: dataBlob): From f012043c3bf8589f9052128ef39c11fbed22c83a Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:10:25 +0000 Subject: [PATCH 105/235] parquet backup now skips if already done --- sysinit/transfer/backup_arctic_to_parquet.py | 37 +++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 1344ada068..d0728bd15c 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -84,14 +84,13 @@ def get_data_blob(logname): data.add_class_list( [ + parquetFuturesMultiplePricesData, #csvBrokerHistoricOrdersData, parquetCapitalData, #csvContractHistoricOrdersData, #csvContractPositionData, parquetFuturesAdjustedPricesData, #csvFuturesContractData, - parquetFuturesContractPriceData, - parquetFuturesMultiplePricesData, #csvFxPricesData, #csvOptimalPositionData, #csvRollStateData, @@ -99,6 +98,7 @@ def get_data_blob(logname): #csvSpreadsForInstrumentData, #csvStrategyHistoricOrdersData, #csvStrategyPositionData, + parquetFuturesContractPriceData, ], use_prefix='parquet' ) @@ -137,6 +137,10 @@ def backup_adj_to_parquet_for_instrument(data: dataBlob, instrument_code: str): arctic_data = data.arctic_futures_adjusted_prices.get_adjusted_prices( instrument_code ) + px = data.parquet_futures_adjusted_prices.get_adjusted_prices(instrument_code) + if len(px)>=len(arctic_data): + data.log.warning("Appears to be more parquet data, not doing this") + return try: data.parquet_futures_adjusted_prices.add_adjusted_prices( instrument_code, arctic_data, ignore_duplication=True @@ -188,6 +192,14 @@ def backup_futures_contract_prices_for_contract_to_parquet( futures_contract ) ) + parquet_data = ( + data.parquet_futures_contract_price.get_merged_prices_for_contract_object( + futures_contract + ) + ) + if len(parquet_data)>=len(arctic_data): + data.log.warning("More parquet data, not doing") + return data.parquet_futures_contract_price.write_merged_prices_for_contract_object( futures_contract, @@ -238,6 +250,12 @@ def backup_multiple_to_parquet_for_instrument(data, instrument_code: str): arctic_data = data.arctic_futures_multiple_prices.get_multiple_prices( instrument_code ) + parquet_data = data.parquet_futures_multiple_prices.get_multiple_prices( + instrument_code) + if len(parquet_data)>=len(arctic_data): + data.log.warning("More parquet data, skipping") + return + data.parquet_futures_multiple_prices.add_multiple_prices( instrument_code, arctic_data, ignore_duplication=True ) @@ -378,6 +396,10 @@ def backup_capital(data): strategy_list = data.arctic_capital._get_list_of_strategies_with_capital_including_total() for strategy_name in strategy_list: strategy_capital_data=data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) + parquet_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) + if len(parquet_data)>strategy_capital_data: + data.log.warning("More parquet data, skipping") + data.parquet_capital.update_capital_pd_df_for_strategy(strategy_name=strategy_name, updated_capital_df=strategy_capital_data) written_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) print("Wrote capital data for strategy %s, was %s now %s" % (strategy_name, str(strategy_capital_data), str(written_data))) @@ -385,17 +407,6 @@ def backup_capital(data): return strategy_capital_data -def add_total_capital_to_strategy_capital_dict_return_df( - data: dataBlob, capital_data: dict -) -> pd.DataFrame: - - strategy_capital_as_df = pd.concat(capital_data, axis=1) - total_capital = data.arctic_capital.get_df_of_all_global_capital() - capital_data = pd.concat([strategy_capital_as_df, total_capital], axis=1) - - capital_data = capital_data.ffill() - - return capital_data def backup_optimal_positions(data): From e09d3b4eb389e5b499e6518b67a94a1a6582020e Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:12:08 +0000 Subject: [PATCH 106/235] parquet backup now skips if already done --- sysinit/transfer/backup_arctic_to_parquet.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index d0728bd15c..484dd4227f 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -79,7 +79,8 @@ def backup_arctic_to_parquet(): def get_data_blob(logname): data = dataBlob( - log_name=logname + log_name=logname, + keep_original_prefix=True ) data.add_class_list( @@ -100,7 +101,7 @@ def get_data_blob(logname): #csvStrategyPositionData, parquetFuturesContractPriceData, ], - use_prefix='parquet' + ) data.add_class_list( From 6854b34ca720d7020afdb81b9e8a345e1c948631 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:15:14 +0000 Subject: [PATCH 107/235] default is now parquet for multiple prices --- sysinit/transfer/backup_arctic_to_parquet.py | 2 +- sysproduction/data/production_data_objects.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 484dd4227f..341f165be6 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -101,7 +101,7 @@ def get_data_blob(logname): #csvStrategyPositionData, parquetFuturesContractPriceData, ], - + ) data.add_class_list( diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index c9c31b778b..38b2c1f3de 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -1,14 +1,16 @@ from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_capital import parquetCapitalData from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData +from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData from sysdata.arctic.arctic_capital import arcticCapitalData from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData +from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData + from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData @@ -59,7 +61,7 @@ FUTURES_CONTRACT_DATA: mongoFuturesContractData, FUTURES_CONTRACT_PRICE_DATA: parquetFuturesContractPriceData, - FUTURES_MULTIPLE_PRICE_DATA: arcticFuturesMultiplePricesData, + FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, CAPITAL_DATA: parquetCapitalData, From 74ca263d8e3f2715cd480ea7ed4a2e08ad8545a3 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:16:02 +0000 Subject: [PATCH 108/235] default is now parquet for multiple prices --- sysdata/sim/db_futures_sim_data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 9f120061f9..37c136440f 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -6,7 +6,7 @@ from syscore.constants import arg_not_supplied from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData -from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData +from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData from sysdata.csv.csv_roll_parameters import csvRollParametersData @@ -60,7 +60,7 @@ def get_class_for_data_type(data_type:str): ROLL_PARAMETERS_DATA: csvRollParametersData, FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, - FUTURES_MULTIPLE_PRICE_DATA: arcticFuturesMultiplePricesData, + FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, SPREAD_DATA: mongoSpreadCostData } From afce8361a000d551beb373263b7e6988160424aa Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:18:40 +0000 Subject: [PATCH 109/235] back up to parquet now interactive --- sysinit/transfer/backup_arctic_to_parquet.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 341f165be6..8524696972 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -5,6 +5,7 @@ from syscore.pandas.pdutils import check_df_equals, check_ts_equals from syscore.dateutils import CALENDAR_DAYS_IN_YEAR from syscore.dateutils import DAILY_PRICE_FREQ, HOURLY_FREQ +from syscore.interactive.input import true_if_answer_is_yes from sysdata.data_blob import dataBlob @@ -61,15 +62,23 @@ def backup_arctic_to_parquet(): log = backup_data.log log.debug("Dumping from arctic, mongo to parquet files") - backup_futures_contract_prices_to_parquet(backup_data) + do = true_if_answer_is_yes("Do futures contract prices?") + if do: + backup_futures_contract_prices_to_parquet(backup_data) #backup_spreads_to_csv(backup_data) #backup_fx_to_csv(backup_data) - backup_multiple_to_parquet(backup_data) - backup_adj_to_parquet(backup_data) + do = true_if_answer_is_yes("Multiple prices?") + if do: + backup_multiple_to_parquet(backup_data) + do = true_if_answer_is_yes("Adjusted prices?") + if do: + backup_adj_to_parquet(backup_data) #backup_strategy_position_data(backup_data) #backup_contract_position_data(backup_data) #backup_historical_orders(backup_data) - backup_capital(backup_data) + do = true_if_answer_is_yes("Capital?") + if do: + backup_capital(backup_data) #backup_contract_data(backup_data) #backup_spread_cost_data(backup_data) #backup_optimal_positions(backup_data) From d7f54cd9b17ece9a59ca2c179f670dccb8992d6c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:26:12 +0000 Subject: [PATCH 110/235] added fx to backup --- sysdata/parquet/parquet_spotfx_prices.py | 57 ++++++++++++++++++++ sysinit/transfer/backup_arctic_to_parquet.py | 30 +++++------ 2 files changed, 72 insertions(+), 15 deletions(-) diff --git a/sysdata/parquet/parquet_spotfx_prices.py b/sysdata/parquet/parquet_spotfx_prices.py index e69de29bb2..449a62b4f5 100644 --- a/sysdata/parquet/parquet_spotfx_prices.py +++ b/sysdata/parquet/parquet_spotfx_prices.py @@ -0,0 +1,57 @@ +from sysdata.parquet.parquet_access import ParquetAccess + +from sysdata.fx.spotfx import fxPricesData +from sysobjects.spot_fx_prices import fxPrices +from syslogging.logger import * +import pandas as pd + +SPOTFX_COLLECTION = "spotfx_prices" + + +class parquetFxPricesData(fxPricesData): + """ + Class to read / write fx prices + """ + + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFxPricesData")): + + super().__init__(log=log) + self._parquet = parquet_access + + @property + def parquet(self): + return self._parquet + + def __repr__(self): + return 'parquetFxPricesData' + + def get_list_of_fxcodes(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=SPOTFX_COLLECTION) + + def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: + + fx_data = self.parquet.read_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + + fx_prices = fxPrices(fx_data[fx_data.columns[0]]) + + return fx_prices + + def _delete_fx_prices_without_any_warning_be_careful(self, currency_code: str): + log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) + self.parquet.delete_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + log.debug("Deleted fX prices for %s from %s" % (currency_code, str(self))) + + def _add_fx_prices_without_checking_for_existing_entry( + self, currency_code: str, fx_price_data: fxPrices + ): + log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) + + fx_price_data_aspd = pd.DataFrame(fx_price_data) + fx_price_data_aspd.columns = ["price"] + fx_price_data_aspd = fx_price_data_aspd.astype(float) + + self.parquet.write_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + log.debug( + "Wrote %s lines of prices for %s to %s" + % (len(fx_price_data), currency_code, str(self)) + ) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 8524696972..2a974f7431 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -13,9 +13,9 @@ from sysdata.parquet.parquet_capital import parquetCapitalData from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData +from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.csv.csv_futures_contracts import csvFuturesContractData -from sysdata.csv.csv_spot_fx import csvFxPricesData from sysdata.csv.csv_contract_position_data import csvContractPositionData from sysdata.csv.csv_strategy_position_data import csvStrategyPositionData from sysdata.csv.csv_historic_orders import ( @@ -66,7 +66,9 @@ def backup_arctic_to_parquet(): if do: backup_futures_contract_prices_to_parquet(backup_data) #backup_spreads_to_csv(backup_data) - #backup_fx_to_csv(backup_data) + do = true_if_answer_is_yes("FX?") + if do: + backup_fx_to_parquet(backup_data) do = true_if_answer_is_yes("Multiple prices?") if do: backup_multiple_to_parquet(backup_data) @@ -101,7 +103,7 @@ def get_data_blob(logname): #csvContractPositionData, parquetFuturesAdjustedPricesData, #csvFuturesContractData, - #csvFxPricesData, + parquetFxPricesData, #csvOptimalPositionData, #csvRollStateData, #csvSpreadCostData, @@ -280,22 +282,20 @@ def backup_multiple_to_parquet_for_instrument(data, instrument_code: str): # fx -def backup_fx_to_csv(data): - fx_codes = data.db_fx_prices.get_list_of_fxcodes() +def backup_fx_to_parquet(data): + fx_codes = data.arctic_fx_prices.get_list_of_fxcodes() for fx_code in fx_codes: - arctic_data = data.db_fx_prices.get_fx_prices(fx_code) - csv_data = data.csv_fx_prices.get_fx_prices(fx_code) - if check_ts_equals(arctic_data, csv_data): + arctic_data = data.arctic_fx_prices.get_fx_prices(fx_code) + parquet_data = data.parquet_fx_prices.get_fx_prices(fx_code) + if len(parquet_data)>=len(arctic_data): data.log.debug("No fx backup needed for %s" % fx_code) else: # Write backup - try: - data.csv_fx_prices.add_fx_prices( - fx_code, arctic_data, ignore_duplication=True - ) - data.log.debug("Written .csv backup for %s" % fx_code) - except BaseException: - data.log.warning("Problem writing .csv backup for %s" % fx_code) + data.parquet_fx_prices.add_fx_prices( + fx_code, arctic_data, ignore_duplication=True + ) + parquet_data = data.parquet_fx_prices.get_fx_prices(fx_code) + data.log.debug("Written fx for %s, was %s now %s" % (fx_code, arctic_data, parquet_data)) From 478f4e1a0bd551f04e646a445ea22c0dda3bd0bc Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:27:27 +0000 Subject: [PATCH 111/235] added fx to backup --- sysdata/parquet/parquet_spotfx_prices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_spotfx_prices.py b/sysdata/parquet/parquet_spotfx_prices.py index 449a62b4f5..d7deee9b07 100644 --- a/sysdata/parquet/parquet_spotfx_prices.py +++ b/sysdata/parquet/parquet_spotfx_prices.py @@ -50,7 +50,7 @@ def _add_fx_prices_without_checking_for_existing_entry( fx_price_data_aspd.columns = ["price"] fx_price_data_aspd = fx_price_data_aspd.astype(float) - self.parquet.write_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + self.parquet.write_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code, data_to_write=fx_price_data_aspd) log.debug( "Wrote %s lines of prices for %s to %s" % (len(fx_price_data), currency_code, str(self)) From 157288529e85905387dbbb6b8b6245794ab61801 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:31:07 +0000 Subject: [PATCH 112/235] parquet now default for fx --- sysdata/sim/db_futures_sim_data.py | 7 ++++++- sysproduction/data/production_data_objects.py | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 37c136440f..c3d77ea8e0 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -7,7 +7,12 @@ from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData +from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData + +from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData +from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData + from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData @@ -56,7 +61,7 @@ def get_class_for_data_type(data_type:str): return use_sim_classes[data_type] use_sim_classes = { - FX_DATA: arcticFxPricesData, + FX_DATA: parquetFxPricesData, ROLL_PARAMETERS_DATA: csvRollParametersData, FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index 38b2c1f3de..86197b81d9 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -2,12 +2,12 @@ from sysdata.parquet.parquet_capital import parquetCapitalData from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData +from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData from sysdata.arctic.arctic_capital import arcticCapitalData from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData - from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData @@ -55,7 +55,7 @@ ROLL_STATE_DATA = "roll_state_data" use_production_classes = { - FX_DATA: arcticFxPricesData, + FX_DATA: parquetFxPricesData, ROLL_PARAMETERS_DATA: csvRollParametersData, FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, FUTURES_CONTRACT_DATA: mongoFuturesContractData, From 2eb59575854761e0df35f62cc8d695c7b4962698 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 12:37:02 +0000 Subject: [PATCH 113/235] weird error fixed --- sysdata/sim/futures_sim_data_with_data_blob.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index e5a557b485..24a37e82db 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -122,4 +122,4 @@ def db_roll_parameters(self) -> rollParametersData: @property def db_spread_cost_data(self) -> spreadCostData: - return self.data.db_spread_cost + return self.data.db_spread_cost_data From 4dfa0e32ceec74d2bfa48180c1a2d8b7633dfd91 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 13:21:46 +0000 Subject: [PATCH 114/235] weird error fixed --- sysdata/sim/futures_sim_data_with_data_blob.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index 24a37e82db..e5a557b485 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -122,4 +122,4 @@ def db_roll_parameters(self) -> rollParametersData: @property def db_spread_cost_data(self) -> spreadCostData: - return self.data.db_spread_cost_data + return self.data.db_spread_cost From bb647b1ceb35f899c8da9eb37b25b266007e9bb8 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 13:33:42 +0000 Subject: [PATCH 115/235] confusion over spread data --- sysdata/sim/db_futures_sim_data.py | 6 +++--- sysproduction/data/instruments.py | 6 +++--- sysproduction/data/prices.py | 6 +++--- sysproduction/data/production_data_objects.py | 10 ++++++---- sysproduction/data/sim_data.py | 4 ++-- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index c3d77ea8e0..d18b0c2373 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -36,7 +36,7 @@ def __init__( get_class_for_data_type(FX_DATA), get_class_for_data_type(FUTURES_INSTRUMENT_DATA), get_class_for_data_type(ROLL_PARAMETERS_DATA), - get_class_for_data_type(SPREAD_DATA) + get_class_for_data_type(STORED_SPREAD_DATA) ], ) @@ -54,7 +54,7 @@ def __repr__(self): FX_DATA = "fx_data" ROLL_PARAMETERS_DATA = "roll_parameters_data" FUTURES_INSTRUMENT_DATA = "futures_instrument_data" -SPREAD_DATA = "spread_data" +STORED_SPREAD_DATA = "stored_spread_data" def get_class_for_data_type(data_type:str): @@ -67,7 +67,7 @@ def get_class_for_data_type(data_type:str): FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, - SPREAD_DATA: mongoSpreadCostData + STORED_SPREAD_DATA: mongoSpreadCostData } diff --git a/sysproduction/data/instruments.py b/sysproduction/data/instruments.py index 1d9584766d..824776dfa2 100644 --- a/sysproduction/data/instruments.py +++ b/sysproduction/data/instruments.py @@ -9,11 +9,11 @@ from sysproduction.data.currency_data import dataCurrency from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.config import get_list_of_stale_instruments -from sysproduction.data.production_data_objects import FX_DATA, SPREAD_DATA, get_class_for_data_type, FUTURES_INSTRUMENT_DATA +from sysproduction.data.production_data_objects import STORED_SPREAD_DATA, get_class_for_data_type, FUTURES_INSTRUMENT_DATA class updateSpreadCosts(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_object(get_class_for_data_type(SPREAD_DATA)) + data.add_class_object(get_class_for_data_type(STORED_SPREAD_DATA)) return data def update_spread_costs(self, instrument_code: str, spread_cost: float): @@ -35,7 +35,7 @@ class diagInstruments(productionDataLayerGeneric): def _add_required_classes_to_data(self, data: dataBlob) -> dataBlob: data.add_class_list([ get_class_for_data_type(FUTURES_INSTRUMENT_DATA), - get_class_for_data_type(SPREAD_DATA)]) + get_class_for_data_type(STORED_SPREAD_DATA)]) return data diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index 32e69b827d..9f53a149d0 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -35,7 +35,7 @@ ## default for spike checking from sysproduction.data.instruments import diagInstruments, get_block_size -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA, SPREAD_DATA +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA, HISTORIC_SPREAD_DATA VERY_BIG_NUMBER = 999999.0 @@ -47,7 +47,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), get_class_for_data_type(FUTURES_CONTRACT_DATA), - get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(HISTORIC_SPREAD_DATA), get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA) ] ) @@ -243,7 +243,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), get_class_for_data_type(FUTURES_CONTRACT_DATA), get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), - get_class_for_data_type(SPREAD_DATA) + get_class_for_data_type(HISTORIC_SPREAD_DATA) ] ) diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index 86197b81d9..02bf54b8a3 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -28,7 +28,7 @@ mongoBrokerHistoricOrdersData, ) from sysdata.mongodb.mongo_roll_state_storage import mongoRollStateData - +from sysdata.mongodb.mongo_spread_costs import mongoSpreadCostData from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData from sysdata.csv.csv_roll_parameters import csvRollParametersData @@ -40,7 +40,8 @@ CONTRACT_POSITION_DATA = "contract_position_data" STRATEGY_POSITION_DATA = "strategy_position_data" OPTIMAL_POSITION_DATA = "optimal_position_data" -SPREAD_DATA = "spread_data" +HISTORIC_SPREAD_DATA = "historic_spread_data" +STORED_SPREAD_DATA ="stored_spread_data" FX_DATA = "fx_data" ROLL_PARAMETERS_DATA = "roll_parameters_data" FUTURES_CONTRACT_DATA = "futures_contract_data" @@ -59,6 +60,7 @@ ROLL_PARAMETERS_DATA: csvRollParametersData, FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, FUTURES_CONTRACT_DATA: mongoFuturesContractData, + STORED_SPREAD_DATA: mongoSpreadCostData, FUTURES_CONTRACT_PRICE_DATA: parquetFuturesContractPriceData, FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, @@ -69,7 +71,7 @@ CONTRACT_POSITION_DATA: arcticContractPositionData, STRATEGY_POSITION_DATA: arcticStrategyPositionData, OPTIMAL_POSITION_DATA: arcticOptimalPositionData, - SPREAD_DATA: arcticSpreadsForInstrumentData, + HISTORIC_SPREAD_DATA: arcticSpreadsForInstrumentData, STRATEGY_HISTORIC_ORDERS_DATA: mongoStrategyHistoricOrdersData, CONTRACT_HISTORIC_ORDERS_DATA: mongoContractHistoricOrdersData, @@ -77,7 +79,7 @@ INSTRUMENT_ORDER_STACK_DATA: mongoInstrumentOrderStackData, CONTRACT_ORDER_STACK_DATA: mongoContractOrderStackData, - BROKER_HISTORIC_ORDERS_DATA: mongoBrokerOrderStackData, + BROKER_ORDER_STACK_DATA: mongoBrokerOrderStackData, ROLL_STATE_DATA: mongoRollStateData, diff --git a/sysproduction/data/sim_data.py b/sysproduction/data/sim_data.py index 2eb3c622cb..03d3db4cb9 100644 --- a/sysproduction/data/sim_data.py +++ b/sysproduction/data/sim_data.py @@ -3,7 +3,7 @@ from sysdata.sim.db_futures_sim_data import dbFuturesSimData from sysdata.data_blob import dataBlob -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FX_DATA, SPREAD_DATA, FUTURES_INSTRUMENT_DATA, ROLL_PARAMETERS_DATA +from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FX_DATA, STORED_SPREAD_DATA, FUTURES_INSTRUMENT_DATA, ROLL_PARAMETERS_DATA def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimData: # Check data has the right elements to do this @@ -15,7 +15,7 @@ def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimDat get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), get_class_for_data_type(FX_DATA), - get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(STORED_SPREAD_DATA), get_class_for_data_type(FUTURES_INSTRUMENT_DATA), get_class_for_data_type(ROLL_PARAMETERS_DATA) ] From 9213d14b3d6a0fb4577828a02529daea50c5afdc Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 13:46:42 +0000 Subject: [PATCH 116/235] added historic spreads to backup --- sysdata/parquet/parquet_spreads.py | 56 ++++++++++++++++++++ sysinit/transfer/backup_arctic_to_parquet.py | 35 ++++++------ 2 files changed, 71 insertions(+), 20 deletions(-) diff --git a/sysdata/parquet/parquet_spreads.py b/sysdata/parquet/parquet_spreads.py index e69de29bb2..1daa76b13a 100644 --- a/sysdata/parquet/parquet_spreads.py +++ b/sysdata/parquet/parquet_spreads.py @@ -0,0 +1,56 @@ +from sysdata.parquet.parquet_access import ParquetAccess + +from sysdata.futures.spreads import spreadsForInstrumentData +from sysobjects.spreads import spreadsForInstrument +from syslogging.logger import * +import pandas as pd + +SPREAD_COLLECTION = "spreads" +SPREAD_COLUMN_NAME = "spread" + + +class parquetSpreadsForInstrumentData(spreadsForInstrumentData): + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetSpreadsForInstrument")): + + super().__init__(log=log) + + self._parquet = parquet_access + + def __repr__(self): + return "parquetSpreadsForInstrument" + + @property + def parquet(self): + return self._parquet + + def get_list_of_instruments(self) -> list: + return self.parquet.get_all_identifiers_with_data_type(data_type=SPREAD_COLLECTION) + + def _get_spreads_without_checking( + self, instrument_code: str + ) -> spreadsForInstrument: + data = self.parquet.read_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, identifier=instrument_code) + + spreads = spreadsForInstrument(data[SPREAD_COLUMN_NAME]) + + return spreads + + def _delete_spreads_without_any_warning_be_careful(self, instrument_code: str): + self.parquet.delete_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, identifier=instrument_code) + self.log.debug( + "Deleted spreads for %s from %s" % (instrument_code, str(self)), + instrument_code=instrument_code, + ) + + def _add_spreads_without_checking_for_existing_entry( + self, instrument_code: str, spreads: spreadsForInstrument + ): + spreads_as_pd = pd.DataFrame(spreads) + spreads_as_pd.columns = [SPREAD_COLUMN_NAME] + spreads_as_pd = spreads_as_pd.astype(float) + self.parquet.write_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, data_to_write=spreads_as_pd, identifier=instrument_code) + self.log.debug( + "Wrote %s lines of spreads for %s to %s" + % (len(spreads_as_pd), instrument_code, str(self)), + instrument_code=instrument_code, + ) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 2a974f7431..9564594bf5 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -14,6 +14,7 @@ from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData +from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.csv.csv_futures_contracts import csvFuturesContractData from sysdata.csv.csv_contract_position_data import csvContractPositionData @@ -26,7 +27,6 @@ from sysdata.csv.csv_optimal_position import csvOptimalPositionData from sysdata.csv.csv_spread_costs import csvSpreadCostData from sysdata.csv.csv_roll_state_storage import csvRollStateData -from sysdata.csv.csv_spreads import csvSpreadsForInstrumentData from sysdata.arctic.arctic_futures_per_contract_prices import ( arcticFuturesContractPriceData, @@ -65,7 +65,7 @@ def backup_arctic_to_parquet(): do = true_if_answer_is_yes("Do futures contract prices?") if do: backup_futures_contract_prices_to_parquet(backup_data) - #backup_spreads_to_csv(backup_data) + do = true_if_answer_is_yes("FX?") if do: backup_fx_to_parquet(backup_data) @@ -82,7 +82,9 @@ def backup_arctic_to_parquet(): if do: backup_capital(backup_data) #backup_contract_data(backup_data) - #backup_spread_cost_data(backup_data) + do = true_if_answer_is_yes("Time series of spread costs?") + if do: + backup_spreads_to_parquet(backup_data) #backup_optimal_positions(backup_data) #backup_roll_state_data(backup_data) @@ -107,7 +109,7 @@ def get_data_blob(logname): #csvOptimalPositionData, #csvRollStateData, #csvSpreadCostData, - #csvSpreadsForInstrumentData, + parquetSpreadsForInstrumentData, #csvStrategyHistoricOrdersData, #csvStrategyPositionData, parquetFuturesContractPriceData, @@ -300,29 +302,25 @@ def backup_fx_to_parquet(data): -def backup_spreads_to_csv(data: dataBlob): +def backup_spreads_to_parquet(data: dataBlob): instrument_list = data.arctic_spreads_for_instrument.get_list_of_instruments() for instrument_code in instrument_list: - backup_spreads_to_csv_for_instrument(data, instrument_code) + backup_spreads_to_parquet_for_instrument(data, instrument_code) -def backup_spreads_to_csv_for_instrument(data: dataBlob, instrument_code: str): +def backup_spreads_to_parquet_for_instrument(data: dataBlob, instrument_code: str): arctic_data = data.arctic_spreads_for_instrument.get_spreads(instrument_code) - csv_data = data.csv_spreads_for_instrument.get_spreads(instrument_code) + parquet_data = data.parquet_spreads_for_instrument.get_spreads(instrument_code) - if check_ts_equals(arctic_data, csv_data): + if len(parquet_data)>=len(arctic_data): data.log.debug("No spreads backup needed for %s" % instrument_code) pass else: - try: - data.csv_spreads_for_instrument.add_spreads( + data.parquet_spreads_for_instrument.add_spreads( instrument_code, arctic_data, ignore_duplication=True ) - data.log.debug("Written .csv backup for spreads %s" % instrument_code) - except BaseException: - data.log.warning( - "Problem writing .csv backup for spreads %s" % instrument_code - ) + parquet_data = data.parquet_spreads_for_instrument.get_spreads(instrument_code) + data.log.debug("Written .csv backup for spreads %s was %s now %s" % (instrument_code, str(arctic_data), str(parquet_data))) def backup_contract_position_data(data): @@ -439,10 +437,7 @@ def backup_optimal_positions(data): def backup_spread_cost_data(data): - spread_cost_as_series = data.mongo_spread_cost.get_spread_costs_as_series() - data.csv_spread_cost.write_all_instrument_spreads(spread_cost_as_series) - data.log.debug("Backed up spread cost data") - + pass def backup_roll_state_data(data): instrument_list = data.mongo_roll_state.get_list_of_instruments() From 2c3a50f72bb89f479c0284e12058c200a98273c1 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 13:51:17 +0000 Subject: [PATCH 117/235] switched default of spread to parquet --- sysproduction/data/production_data_objects.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index 02bf54b8a3..95c207bf30 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -3,6 +3,7 @@ from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData +from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData from sysdata.arctic.arctic_capital import arcticCapitalData @@ -71,7 +72,7 @@ CONTRACT_POSITION_DATA: arcticContractPositionData, STRATEGY_POSITION_DATA: arcticStrategyPositionData, OPTIMAL_POSITION_DATA: arcticOptimalPositionData, - HISTORIC_SPREAD_DATA: arcticSpreadsForInstrumentData, + HISTORIC_SPREAD_DATA: parquetSpreadsForInstrumentData, STRATEGY_HISTORIC_ORDERS_DATA: mongoStrategyHistoricOrdersData, CONTRACT_HISTORIC_ORDERS_DATA: mongoContractHistoricOrdersData, From 68154c78b43752a5aa444a6990db312c9d57eba8 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 13:58:45 +0000 Subject: [PATCH 118/235] added optimal position data --- sysdata/parquet/parquet_optimal_positions.py | 60 ++++++++++++++++++++ sysinit/transfer/backup_arctic_to_parquet.py | 26 +++++++-- 2 files changed, 82 insertions(+), 4 deletions(-) diff --git a/sysdata/parquet/parquet_optimal_positions.py b/sysdata/parquet/parquet_optimal_positions.py index e69de29bb2..c9764f43d4 100644 --- a/sysdata/parquet/parquet_optimal_positions.py +++ b/sysdata/parquet/parquet_optimal_positions.py @@ -0,0 +1,60 @@ + +from syscore.exceptions import missingData +from sysdata.parquet.parquet_access import ParquetAccess +from sysdata.production.optimal_positions import optimalPositionData +from syslogging.logger import * + +from sysobjects.production.tradeable_object import ( + instrumentStrategy, + listOfInstrumentStrategies, +) + +import pandas as pd + +OPTIMAL_POSITION_COLLECTION = "optimal_positions" + + +class parquetOptimalPositionData(optimalPositionData): + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetOptimalPositionData")): + + super().__init__(log=log) + self._parquet = parquet_access + + def __repr__(self): + return "parquetOptimalPositionData" + + @property + def parquet(self): + return self._parquet + + def get_list_of_instrument_strategies_with_optimal_position( + self, + ) -> listOfInstrumentStrategies: + + raw_list_of_instrument_strategies = self.parquet.get_all_identifiers_with_data_type(data_type=OPTIMAL_POSITION_COLLECTION) + list_of_instrument_strategies = [ + instrumentStrategy.from_key(key) + for key in raw_list_of_instrument_strategies + ] + + return listOfInstrumentStrategies(list_of_instrument_strategies) + + def get_optimal_position_as_df_for_instrument_strategy( + self, instrument_strategy: instrumentStrategy + ) -> pd.DataFrame: + + try: + ident = instrument_strategy.key + df_result = self.parquet.read_data_given_data_type_and_identifier(data_type=OPTIMAL_POSITION_COLLECTION, identifier=ident) + except: + raise missingData + + return df_result + + def write_optimal_position_as_df_for_instrument_strategy_without_checking( + self, + instrument_strategy: instrumentStrategy, + optimal_positions_as_df: pd.DataFrame, + ): + ident = instrument_strategy.key + self.parquet.write_data_given_data_type_and_identifier(data_type=OPTIMAL_POSITION_COLLECTION, identifier=ident, data_to_write=optimal_positions_as_df) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 9564594bf5..871c319756 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -15,6 +15,7 @@ from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData +from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData from sysdata.csv.csv_futures_contracts import csvFuturesContractData from sysdata.csv.csv_contract_position_data import csvContractPositionData @@ -85,7 +86,9 @@ def backup_arctic_to_parquet(): do = true_if_answer_is_yes("Time series of spread costs?") if do: backup_spreads_to_parquet(backup_data) - #backup_optimal_positions(backup_data) + do = true_if_answer_is_yes("optimal positions?") + if do: + backup_optimal_positions(backup_data) #backup_roll_state_data(backup_data) @@ -106,7 +109,7 @@ def get_data_blob(logname): parquetFuturesAdjustedPricesData, #csvFuturesContractData, parquetFxPricesData, - #csvOptimalPositionData, + parquetOptimalPositionData, #csvRollStateData, #csvSpreadCostData, parquetSpreadsForInstrumentData, @@ -430,10 +433,25 @@ def backup_optimal_positions(data): ) except missingData: continue - data.csv_optimal_position.write_optimal_position_as_df_for_instrument_strategy_without_checking( + + try: + parquet_data = data.parquet_optimal_position.get_optimal_position_as_df_for_instrument_strategy( + instrument_strategy + ) + except missingData: + parquet_data=[] + + if len(parquet_data)>=len(arctic_data): + data.log.debug("skipping already written") + + data.parquet_optimal_position.write_optimal_position_as_df_for_instrument_strategy_without_checking( instrument_strategy, arctic_data ) - data.log.debug("Backed up %s optimal position data" % str(instrument_strategy)) + parquet_data = data.parquet_optimal_position.get_optimal_position_as_df_for_instrument_strategy( + instrument_strategy + ) + + data.log.debug("Backed up %s optimal position data was %s now %s" % (str(instrument_strategy), str(arctic_data), str(parquet_data))) def backup_spread_cost_data(data): From 2939dd4674bb94a8c97cdfe043732d694b33adee Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:03:17 +0000 Subject: [PATCH 119/235] switched optimal positions to parquet --- sysdata/sim/db_futures_sim_data.py | 2 ++ sysproduction/data/production_data_objects.py | 10 +++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index d18b0c2373..4f7e971c45 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -9,9 +9,11 @@ from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData +""" from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData +""" from sysdata.csv.csv_instrument_data import csvFuturesInstrumentData from sysdata.csv.csv_roll_parameters import csvRollParametersData diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index 95c207bf30..e8e8e2c013 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -4,17 +4,21 @@ from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData +from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData +""" from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData from sysdata.arctic.arctic_capital import arcticCapitalData from sysdata.arctic.arctic_futures_per_contract_prices import arcticFuturesContractPriceData from sysdata.arctic.arctic_multiple_prices import arcticFuturesMultiplePricesData from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData -from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData -from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData +""" +from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData +from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData + from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData from sysdata.mongodb.mongo_process_control import mongoControlProcessData @@ -71,7 +75,7 @@ CONTRACT_POSITION_DATA: arcticContractPositionData, STRATEGY_POSITION_DATA: arcticStrategyPositionData, - OPTIMAL_POSITION_DATA: arcticOptimalPositionData, + OPTIMAL_POSITION_DATA: parquetOptimalPositionData, HISTORIC_SPREAD_DATA: parquetSpreadsForInstrumentData, STRATEGY_HISTORIC_ORDERS_DATA: mongoStrategyHistoricOrdersData, From 225390ea7820af67e133c72c308fa8fefcb9a66d Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:16:13 +0000 Subject: [PATCH 120/235] added position data to backup --- .../parquet_historic_contract_positions.py | 64 ++++++++++++++++++ .../parquet_historic_strategy_positions.py | 65 +++++++++++++++++++ sysinit/transfer/backup_arctic_to_parquet.py | 62 ++++++++++++++---- 3 files changed, 180 insertions(+), 11 deletions(-) create mode 100644 sysdata/parquet/parquet_historic_strategy_positions.py diff --git a/sysdata/parquet/parquet_historic_contract_positions.py b/sysdata/parquet/parquet_historic_contract_positions.py index e69de29bb2..69c43f6c53 100644 --- a/sysdata/parquet/parquet_historic_contract_positions.py +++ b/sysdata/parquet/parquet_historic_contract_positions.py @@ -0,0 +1,64 @@ + +import pandas as pd + +from sysobjects.contracts import futuresContract, listOfFuturesContracts +from sysdata.parquet.parquet_access import ParquetAccess + +from sysdata.production.historic_contract_positions import contractPositionData + +from syscore.exceptions import missingData + +from syslogging.logger import * + +CONTRACT_POSITION_COLLECTION = "contract_positions" + + +class parquetContractPositionData(contractPositionData): + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetContractPositionData")): + + super().__init__(log=log) + + self._parquet = parquet_access + + def __repr__(self): + return "parquetContractPositionData" + + @property + def parquet(self): + return self._parquet + + def _write_updated_position_series_for_contract_object( + self, contract_object: futuresContract, updated_series: pd.Series + ): + ## overwrites what is there without checking + ident = contract_object.key + updated_data_as_df = pd.DataFrame(updated_series) + updated_data_as_df.columns = ["position"] + + self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_data_as_df, identifier=ident, data_type=CONTRACT_POSITION_COLLECTION) + + def _delete_position_series_for_contract_object_without_checking( + self, contract_object: futuresContract + ): + ident = contract_object.key + self.parquet.delete_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=ident) + + def get_position_as_series_for_contract_object( + self, contract_object: futuresContract + ) -> pd.Series: + keyname = contract_object.key + try: + pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=keyname) + except: + raise missingData + + return pd_df.iloc[:, 0] + + def get_list_of_contracts(self) -> listOfFuturesContracts: + ## doesn't remove zero positions + list_of_keys = self.parquet.get_all_identifiers_with_data_type(data_type=CONTRACT_POSITION_COLLECTION) + list_of_futures_contract = [ + futuresContract.from_key(key) for key in list_of_keys + ] + + return listOfFuturesContracts(list_of_futures_contract) diff --git a/sysdata/parquet/parquet_historic_strategy_positions.py b/sysdata/parquet/parquet_historic_strategy_positions.py new file mode 100644 index 0000000000..d71986fcba --- /dev/null +++ b/sysdata/parquet/parquet_historic_strategy_positions.py @@ -0,0 +1,65 @@ +import pandas as pd + +from sysobjects.production.tradeable_object import ( + listOfInstrumentStrategies, + instrumentStrategy, +) +from sysdata.parquet.parquet_access import ParquetAccess +from sysdata.production.historic_strategy_positions import strategyPositionData +from syscore.exceptions import missingData + +from syslogging.logger import * + +STRATEGY_POSITION_COLLECTION = "strategy_positions" + + +class parquetStrategyPositionData(strategyPositionData): + def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetStrategyPositionData")): + + super().__init__(log=log) + + self._parquet = parquet_access + + def __repr__(self): + return "parquetStrategyPositionData" + + @property + @property + def parquet(self): + return self._parquet + + def get_list_of_instrument_strategies(self) -> listOfInstrumentStrategies: + list_of_keys = self.parquet.get_all_identifiers_with_data_type(data_type=STRATEGY_POSITION_COLLECTION) + list_of_instrument_strategies = [ + instrumentStrategy.from_key(key) for key in list_of_keys + ] + + return listOfInstrumentStrategies(list_of_instrument_strategies) + + def _write_updated_position_series_for_instrument_strategy_object( + self, instrument_strategy: instrumentStrategy, updated_series: pd.Series + ): + + ident = instrument_strategy.key + updated_data_as_df = pd.DataFrame(updated_series) + updated_data_as_df.columns = ["position"] + + self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_data_as_df, identifier=ident, data_type=STRATEGY_POSITION_COLLECTION) + + def _delete_position_series_for_instrument_strategy_object_without_checking( + self, instrument_strategy: instrumentStrategy + ): + ident = instrument_strategy.key + self.parquet.delete_data_given_data_type_and_identifier(data_type=STRATEGY_POSITION_COLLECTION, identifier=ident) + + def get_position_as_series_for_instrument_strategy_object( + self, instrument_strategy: instrumentStrategy + ) -> pd.Series: + + keyname = instrument_strategy.key + try: + pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=STRATEGY_POSITION_COLLECTION, identifier=keyname) + except: + raise missingData + + return pd_df.iloc[:, 0] diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 871c319756..c010c8237f 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -16,6 +16,8 @@ from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData +from sysdata.parquet.parquet_historic_contract_positions import parquetContractPositionData +from sysdata.parquet.parquet_historic_strategy_positions import parquetStrategyPositionData from sysdata.csv.csv_futures_contracts import csvFuturesContractData from sysdata.csv.csv_contract_position_data import csvContractPositionData @@ -76,19 +78,25 @@ def backup_arctic_to_parquet(): do = true_if_answer_is_yes("Adjusted prices?") if do: backup_adj_to_parquet(backup_data) - #backup_strategy_position_data(backup_data) - #backup_contract_position_data(backup_data) - #backup_historical_orders(backup_data) + do = true_if_answer_is_yes("Strategy positions?") + if do: + backup_strategy_position_data(backup_data) + do = true_if_answer_is_yes("Contract positions?") + if do: + backup_contract_position_data(backup_data) + do = true_if_answer_is_yes("Capital?") if do: backup_capital(backup_data) - #backup_contract_data(backup_data) do = true_if_answer_is_yes("Time series of spread costs?") if do: backup_spreads_to_parquet(backup_data) do = true_if_answer_is_yes("optimal positions?") if do: backup_optimal_positions(backup_data) + + # backup_contract_data(backup_data) + # backup_historical_orders(backup_data) #backup_roll_state_data(backup_data) @@ -343,12 +351,28 @@ def backup_contract_position_data(data): ) except missingData: print("No data to write to .csv") - else: - data.csv_contract_position.overwrite_position_series_for_contract_object_without_checking( - contract, arctic_data + continue + + try: + parquet_data = data.parquet_contract_position.get_position_as_series_for_contract_object( + contract ) + except missingData: + parquet_data = [] + + if len(parquet_data)>=len(arctic_data): + data.log.debug("Skipping") + continue + + data.parquet_contract_position.overwrite_position_series_for_contract_object_without_checking( + contract, arctic_data + ) + parquet_data = data.parquet_contract_position.get_position_as_series_for_contract_object( + contract + ) + data.log.debug( - "Backed up %s %s contract position data" % (instrument_code, contract) + "Backed up %s %s contract position data was %s now %s" % (instrument_code, contract, str(arctic_data), str(parquet_data)) ) @@ -368,12 +392,28 @@ def backup_strategy_position_data(data): ) except missingData: continue - data.csv_strategy_position.overwrite_position_series_for_instrument_strategy_without_checking( + + try: + parquet_data = data.parquet_strategy_position.get_position_as_series_for_instrument_strategy_object( + instrument_strategy + ) + except missingData: + parquet_data = [] + if len(parquet_data)>=len(arctic_data): + data.log.debug("Skipping") + continue + + data.parquet_strategy_position.overwrite_position_series_for_instrument_strategy_without_checking( instrument_strategy, arctic_data ) + + parquet_data = data.parquet_strategy_position.get_position_as_series_for_instrument_strategy_object( + instrument_strategy + ) + data.log.debug( - "Backed up %s %s strategy position data" - % (instrument_code, strategy_name) + "Backed up %s %s strategy position data was %s now %s" + % (instrument_code, strategy_name, str(arctic_data), str(parquet_data)) ) From 886d7b1a7f3b36d2458211da7b2f90717267b053 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:18:49 +0000 Subject: [PATCH 121/235] added position data to backup --- sysinit/transfer/backup_arctic_to_parquet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index c010c8237f..661175a819 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -113,7 +113,7 @@ def get_data_blob(logname): #csvBrokerHistoricOrdersData, parquetCapitalData, #csvContractHistoricOrdersData, - #csvContractPositionData, + parquetContractPositionData, parquetFuturesAdjustedPricesData, #csvFuturesContractData, parquetFxPricesData, @@ -122,7 +122,7 @@ def get_data_blob(logname): #csvSpreadCostData, parquetSpreadsForInstrumentData, #csvStrategyHistoricOrdersData, - #csvStrategyPositionData, + parquetStrategyPositionData, parquetFuturesContractPriceData, ], From e149efa40241940548e5361788a07f0b4fcab0b3 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:20:51 +0000 Subject: [PATCH 122/235] added position data to backup --- sysdata/parquet/parquet_historic_strategy_positions.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sysdata/parquet/parquet_historic_strategy_positions.py b/sysdata/parquet/parquet_historic_strategy_positions.py index d71986fcba..f016046efc 100644 --- a/sysdata/parquet/parquet_historic_strategy_positions.py +++ b/sysdata/parquet/parquet_historic_strategy_positions.py @@ -23,7 +23,6 @@ def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetStrateg def __repr__(self): return "parquetStrategyPositionData" - @property @property def parquet(self): return self._parquet From 8918298449c604c2c30e32c2ed5cf9e627e6ac5e Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:26:15 +0000 Subject: [PATCH 123/235] added position data to backup --- .../parquet_historic_contract_positions.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/sysdata/parquet/parquet_historic_contract_positions.py b/sysdata/parquet/parquet_historic_contract_positions.py index 69c43f6c53..eaae2455a0 100644 --- a/sysdata/parquet/parquet_historic_contract_positions.py +++ b/sysdata/parquet/parquet_historic_contract_positions.py @@ -31,7 +31,7 @@ def _write_updated_position_series_for_contract_object( self, contract_object: futuresContract, updated_series: pd.Series ): ## overwrites what is there without checking - ident = contract_object.key + ident = from_contract_to_key(contract_object) updated_data_as_df = pd.DataFrame(updated_series) updated_data_as_df.columns = ["position"] @@ -40,13 +40,13 @@ def _write_updated_position_series_for_contract_object( def _delete_position_series_for_contract_object_without_checking( self, contract_object: futuresContract ): - ident = contract_object.key + ident = from_contract_to_key(contract_object) self.parquet.delete_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=ident) def get_position_as_series_for_contract_object( self, contract_object: futuresContract ) -> pd.Series: - keyname = contract_object.key + keyname = from_contract_to_key(contract_object) try: pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=keyname) except: @@ -58,7 +58,14 @@ def get_list_of_contracts(self) -> listOfFuturesContracts: ## doesn't remove zero positions list_of_keys = self.parquet.get_all_identifiers_with_data_type(data_type=CONTRACT_POSITION_COLLECTION) list_of_futures_contract = [ - futuresContract.from_key(key) for key in list_of_keys + from_key_to_contract(key) for key in list_of_keys ] return listOfFuturesContracts(list_of_futures_contract) + +def from_contract_to_key(contract: futuresContract) -> str: + return contract.instrument_code+"#"+contract.contract_date + +def from_key_to_contract(key: str) -> futuresContract: + [instrument_code, contract_date] = key.split("#") + return futuresContract(instrument_code, contract_date) \ No newline at end of file From 6ec9e1fdbe443b403d149c01daee374cad0f7d32 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:28:11 +0000 Subject: [PATCH 124/235] added position data to backup --- sysdata/parquet/parquet_historic_contract_positions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/parquet/parquet_historic_contract_positions.py b/sysdata/parquet/parquet_historic_contract_positions.py index eaae2455a0..b2cc6c7537 100644 --- a/sysdata/parquet/parquet_historic_contract_positions.py +++ b/sysdata/parquet/parquet_historic_contract_positions.py @@ -64,7 +64,7 @@ def get_list_of_contracts(self) -> listOfFuturesContracts: return listOfFuturesContracts(list_of_futures_contract) def from_contract_to_key(contract: futuresContract) -> str: - return contract.instrument_code+"#"+contract.contract_date + return str(contract.instrument_code)+"#"+str(contract.contract_date) def from_key_to_contract(key: str) -> futuresContract: [instrument_code, contract_date] = key.split("#") From 72db7351e805e328af8afc0fc7d240210e99c773 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:31:35 +0000 Subject: [PATCH 125/235] switched default for position storage to parquet --- sysproduction/data/production_data_objects.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index e8e8e2c013..99399b77df 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -5,6 +5,8 @@ from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData +from sysdata.parquet.parquet_historic_strategy_positions import parquetStrategyPositionData +from sysdata.parquet.parquet_historic_contract_positions import parquetContractPositionData """ from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData @@ -14,10 +16,9 @@ from sysdata.arctic.arctic_spotfx_prices import arcticFxPricesData from sysdata.arctic.arctic_optimal_positions import arcticOptimalPositionData from sysdata.arctic.arctic_spreads import arcticSpreadsForInstrumentData - -""" from sysdata.arctic.arctic_historic_contract_positions import arcticContractPositionData from sysdata.arctic.arctic_historic_strategy_positions import arcticStrategyPositionData +""" from sysdata.mongodb.mongo_futures_contracts import mongoFuturesContractData @@ -73,8 +74,8 @@ CAPITAL_DATA: parquetCapitalData, - CONTRACT_POSITION_DATA: arcticContractPositionData, - STRATEGY_POSITION_DATA: arcticStrategyPositionData, + CONTRACT_POSITION_DATA: parquetContractPositionData, + STRATEGY_POSITION_DATA: parquetStrategyPositionData, OPTIMAL_POSITION_DATA: parquetOptimalPositionData, HISTORIC_SPREAD_DATA: parquetSpreadsForInstrumentData, From a1ee94c6d7a2b1c88b4d46ca6d2599b7584f046c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:56:52 +0000 Subject: [PATCH 126/235] added parquet backup changed control config --- requirements.txt | 7 ++-- setup.py | 18 +++----- syscontrol/control_config.yaml | 4 +- .../backup_parquet_data_to_remote.py | 41 +++++++++++++++++++ .../linux/scripts/backup_parquet_to_remote | 3 ++ sysproduction/run_backups.py | 4 ++ 6 files changed, 59 insertions(+), 18 deletions(-) create mode 100644 sysproduction/backup_parquet_data_to_remote.py create mode 100644 sysproduction/linux/scripts/backup_parquet_to_remote diff --git a/requirements.txt b/requirements.txt index 6d381481d0..90a9db23b9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,14 @@ -pandas==1.0.5 +pandas==2.1.3 matplotlib>=3.0.0 pyyaml==5.4 -numpy>=1.19.4,<1.24.0 +numpy>=1.24.0 scipy>=1.0.0 pymongo==3.11.3 -arctic==1.79.2 ib-insync==0.9.86 psutil==5.6.6 pytest>6.2 Flask>=2.0.1 Werkzeug>=2.0.1 -statsmodels==0.13.0 +statsmodels==0.14.0 PyPDF2>=2.5.0 pyarrow>=14.0.1 \ No newline at end of file diff --git a/setup.py b/setup.py index 454f4b819a..5fb1ac5b43 100755 --- a/setup.py +++ b/setup.py @@ -5,15 +5,8 @@ from setuptools import setup, find_packages from distutils.version import StrictVersion -if StrictVersion(platform.python_version()) <= StrictVersion("3.7.0"): - print("pysystemtrade requires Python 3.7.0 or later. Exiting.", file=sys.stderr) - sys.exit(1) - -if StrictVersion(platform.python_version()) >= StrictVersion("3.9.0"): - print( - "pysystemtrade requires Python 3.8.* or earlier (pandas issue). Exiting.", - file=sys.stderr, - ) +if StrictVersion(platform.python_version()) <= StrictVersion("3.10.0"): + print("pysystemtrade requires Python 3.10.0 or later. Exiting.", file=sys.stderr) sys.exit(1) @@ -91,19 +84,18 @@ def dir_this_file(): package_data=package_data, long_description=read("README.md"), install_requires=[ - "pandas==1.0.5", + "pandas==2.1.3", "matplotlib>=3.0.0", "ib-insync==0.9.86", "PyYAML>=5.4", - "numpy>=1.19.4,<1.24.0", + "numpy>=1.24.0", "scipy>=1.0.0", "pymongo==3.11.3", - "arctic==1.79.2", "psutil==5.6.6", "pytest>6.2", "Flask>=2.0.1", "Werkzeug>=2.0.1", - "statsmodels==0.12.2", + "statsmodels==0.14.0", "PyPDF2>=2.5.0", "pyarrow>=14.0.1" ], diff --git a/syscontrol/control_config.yaml b/syscontrol/control_config.yaml index e9d2256c51..8d77a1f8c3 100644 --- a/syscontrol/control_config.yaml +++ b/syscontrol/control_config.yaml @@ -116,7 +116,9 @@ process_configuration_methods: account_curve_report: max_executions: 1 run_backups: - backup_arctic_to_csv: + backup_db_to_csv: + max_executions: 1 + backup_parquet: max_executions: 1 backup_files: max_executions: 1 diff --git a/sysproduction/backup_parquet_data_to_remote.py b/sysproduction/backup_parquet_data_to_remote.py new file mode 100644 index 0000000000..5de0f9b143 --- /dev/null +++ b/sysproduction/backup_parquet_data_to_remote.py @@ -0,0 +1,41 @@ +import os +from sysdata.config.production_config import get_production_config + +from sysproduction.data.directories import ( + + get_mongo_backup_directory, + +) + +from sysdata.data_blob import dataBlob + + +def backup_parquet_data_to_remote(): + data = dataBlob(log_name="backup_mongo_data_as_dump") + backup_object = backupParquet(data) + backup_object.backup_mongo_data_as_dump() + + return None + + +class backupParquet(object): + def __init__(self, data): + self.data = data + + def backup_parquet_data_to_remote(self): + data = self.data + log = data.log + log.debug("Copying data to backup destination") + backup_parquet_data(data) + + + +def backup_parquet_data(data): + source_path = get_parquet_directory() + destination_path = get_parquet_backup_directory() + data.log.debug("Copy from %s to %s" % (source_path, destination_path)) + os.system("rsync -av %s %s" % (source_path, destination_path)) + + +if __name__ == "__main__": + backup_parquet_data_to_remote() diff --git a/sysproduction/linux/scripts/backup_parquet_to_remote b/sysproduction/linux/scripts/backup_parquet_to_remote new file mode 100644 index 0000000000..4260f122c2 --- /dev/null +++ b/sysproduction/linux/scripts/backup_parquet_to_remote @@ -0,0 +1,3 @@ +#!/bin/bash +. ~/.profile +. p sysproduction.backup_parquet_data_to_remote.backup_parquet_data_to_remote \ No newline at end of file diff --git a/sysproduction/run_backups.py b/sysproduction/run_backups.py index 5a53e7acf3..7b53cd0a8f 100644 --- a/sysproduction/run_backups.py +++ b/sysproduction/run_backups.py @@ -2,6 +2,7 @@ from sysproduction.backup_db_to_csv import backupDbToCsv from sysproduction.backup_mongo_data_as_dump import backupMongo from sysproduction.backup_state_files import backupStateFiles +from sysproduction.backup_parquet_data_to_remote import backupParquet from sysdata.data_blob import dataBlob @@ -17,15 +18,18 @@ def get_list_of_timer_functions_for_backup(): data_db_backups = dataBlob(log_name="backup_db_to_csv") data_state_files = dataBlob(log_name="backup_files") data_mongo_dump = dataBlob(log_name="backup_mongo_data_as_dump") + data_parquet_backup = dataBlob(log_name="backup_parquet_to_remote") db_backup_object = backupDbToCsv(data_db_backups) statefile_backup_object = backupStateFiles(data_state_files) mongodump_backup_object = backupMongo(data_mongo_dump) + parquet_backup_object = backupParquet(data_parquet_backup) list_of_timer_names_and_functions = [ ("backup_db_to_csv", db_backup_object), ("backup_mongo_data_as_dump", mongodump_backup_object), ("backup_files", statefile_backup_object), + ("backup_parquet", parquet_backup_object) ] return list_of_timer_names_and_functions From 045115754d5d5c4c29b8de5d5da150267e60e022 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 14:59:39 +0000 Subject: [PATCH 127/235] accidentally wrote over setup and requirements --- requirements.txt | 7 ++++--- setup.py | 18 +++++++++++++----- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/requirements.txt b/requirements.txt index 90a9db23b9..6d381481d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,15 @@ -pandas==2.1.3 +pandas==1.0.5 matplotlib>=3.0.0 pyyaml==5.4 -numpy>=1.24.0 +numpy>=1.19.4,<1.24.0 scipy>=1.0.0 pymongo==3.11.3 +arctic==1.79.2 ib-insync==0.9.86 psutil==5.6.6 pytest>6.2 Flask>=2.0.1 Werkzeug>=2.0.1 -statsmodels==0.14.0 +statsmodels==0.13.0 PyPDF2>=2.5.0 pyarrow>=14.0.1 \ No newline at end of file diff --git a/setup.py b/setup.py index 5fb1ac5b43..0c8c3f6169 100755 --- a/setup.py +++ b/setup.py @@ -5,10 +5,17 @@ from setuptools import setup, find_packages from distutils.version import StrictVersion -if StrictVersion(platform.python_version()) <= StrictVersion("3.10.0"): - print("pysystemtrade requires Python 3.10.0 or later. Exiting.", file=sys.stderr) + +if StrictVersion(platform.python_version()) <= StrictVersion("3.7.0"): + print("pysystemtrade requires Python 3.7.0 or later. Exiting.", file=sys.stderr) sys.exit(1) +if StrictVersion(platform.python_version()) >= StrictVersion("3.9.0"): + print( + "pysystemtrade requires Python 3.8.* or earlier (pandas issue). Exiting.", + file=sys.stderr, + ) + sys.exit(1) def read(fname): """Utility function to read the README file.""" @@ -84,18 +91,19 @@ def dir_this_file(): package_data=package_data, long_description=read("README.md"), install_requires=[ - "pandas==2.1.3", + "pandas==1.0.5", "matplotlib>=3.0.0", "ib-insync==0.9.86", "PyYAML>=5.4", - "numpy>=1.24.0", + "numpy>=1.19.4,<1.24.0", "scipy>=1.0.0", "pymongo==3.11.3", + "arctic==1.79.2", "psutil==5.6.6", "pytest>6.2", "Flask>=2.0.1", "Werkzeug>=2.0.1", - "statsmodels==0.14.0", + "statsmodels==0.12.2", "PyPDF2>=2.5.0", "pyarrow>=14.0.1" ], From 75e6f30ca3707444ef614c2bc745de49da0e6808 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 15:03:15 +0000 Subject: [PATCH 128/235] accidentally wrote over setup and requirements --- sysproduction/backup_parquet_data_to_remote.py | 10 ++++++++-- sysproduction/data/directories.py | 7 +++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/sysproduction/backup_parquet_data_to_remote.py b/sysproduction/backup_parquet_data_to_remote.py index 5de0f9b143..c8481f5bd4 100644 --- a/sysproduction/backup_parquet_data_to_remote.py +++ b/sysproduction/backup_parquet_data_to_remote.py @@ -3,7 +3,7 @@ from sysproduction.data.directories import ( - get_mongo_backup_directory, + get_parquet_backup_directory ) @@ -13,11 +13,17 @@ def backup_parquet_data_to_remote(): data = dataBlob(log_name="backup_mongo_data_as_dump") backup_object = backupParquet(data) - backup_object.backup_mongo_data_as_dump() + backup_object.backup_parquet_data_to_remote() return None + +def get_parquet_directory(): + data = dataBlob() + return data.parquet_root_directory + + class backupParquet(object): def __init__(self, data): self.data = data diff --git a/sysproduction/data/directories.py b/sysproduction/data/directories.py index a6b019bb8e..07907eef5c 100644 --- a/sysproduction/data/directories.py +++ b/sysproduction/data/directories.py @@ -7,6 +7,7 @@ production_config = get_production_config() + def get_main_backup_directory(): ans = production_config.get_element("offsystem_backup_directory") return get_resolved_pathname(ans) @@ -18,6 +19,12 @@ def get_csv_backup_directory(): return ans +def get_parquet_backup_directory(): + main_backup = get_main_backup_directory() + ans = os.path.join(main_backup, "parquet") + + return ans + def get_csv_dump_dir(): ans = production_config.get_element("csv_backup_directory") From d687c7d3afca58d76d4441f4e844514c83a91599 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 20 Nov 2023 16:03:30 +0000 Subject: [PATCH 129/235] black and started tweaking codde to run --- dashboard/app.py | 1 - data/tools/contract_comparison.py | 2 +- ...list_of_futures_product_pages_generator.py | 1 - examples/introduction/asimpletradingrule.py | 1 - examples/introduction/prebakedsystems.py | 4 +- .../example_of_custom_run_system.py | 3 - pyproject.toml | 4 +- requirements.txt | 8 +- setup.py | 21 +- sysbrokers/IB/client/ib_accounting_client.py | 4 - sysbrokers/IB/client/ib_client.py | 5 - sysbrokers/IB/client/ib_contracts_client.py | 6 - sysbrokers/IB/client/ib_fx_client.py | 2 - sysbrokers/IB/client/ib_orders_client.py | 2 - sysbrokers/IB/client/ib_price_client.py | 6 - sysbrokers/IB/config/ib_fx_config.py | 1 - sysbrokers/IB/config/ib_instrument_config.py | 6 - sysbrokers/IB/ib_Fx_prices_data.py | 2 - sysbrokers/IB/ib_contract_position_data.py | 2 - sysbrokers/IB/ib_contracts.py | 5 - .../IB/ib_futures_contract_price_data.py | 6 - sysbrokers/IB/ib_futures_contracts_data.py | 1 - sysbrokers/IB/ib_fx_handling.py | 1 - sysbrokers/IB/ib_instruments.py | 1 - sysbrokers/IB/ib_instruments_data.py | 2 - sysbrokers/IB/ib_orders.py | 4 - sysbrokers/IB/ib_positions.py | 3 - sysbrokers/IB/ib_trading_hours.py | 2 - sysbrokers/broker_capital_data.py | 1 - sysbrokers/broker_fx_handling.py | 1 - syscontrol/run_process.py | 1 - syscontrol/strategy_tools.py | 1 - syscontrol/timer_functions.py | 5 - syscore/capital.py | 1 - syscore/dateutils.py | 4 - syscore/interactive/date_input.py | 2 - syscore/interactive/menus.py | 4 - syscore/interactive/progress_bar.py | 1 - syscore/interactive/run_functions.py | 3 +- syscore/maths.py | 1 - syscore/pandas/full_merge_with_replacement.py | 1 - syscore/pandas/list_of_df.py | 2 +- .../pandas/merge_data_keeping_past_data.py | 5 - .../pandas/merge_data_with_label_column.py | 5 - syscore/pandas/pdutils.py | 3 - syscore/tests/test_correlation.py | 1 - sysdata/_DEPRECATED/mongo_log.py | 1 - ...mongo_position_by_contract_TO_DEPRECATE.py | 1 - .../mongo_timed_storage_TO_DEPRECATE.py | 1 - .../_DEPRECATED/timed_storage_TO_DEPRECATE.py | 8 - sysdata/arctic/arctic_adjusted_prices.py | 1 - sysdata/arctic/arctic_capital.py | 2 - sysdata/arctic/arctic_connection.py | 1 - .../arctic_futures_per_contract_prices.py | 5 - .../arctic_historic_contract_positions.py | 1 - .../arctic_historic_strategy_positions.py | 3 - sysdata/arctic/arctic_multiple_prices.py | 3 - sysdata/arctic/arctic_optimal_positions.py | 3 - sysdata/arctic/arctic_spotfx_prices.py | 2 - sysdata/arctic/arctic_spreads.py | 1 - sysdata/config/control_config.py | 1 - sysdata/config/instruments.py | 2 - sysdata/csv/csv_adjusted_prices.py | 2 - sysdata/csv/csv_capital_data.py | 1 - sysdata/csv/csv_contract_position_data.py | 1 - sysdata/csv/csv_futures_contract_prices.py | 4 - sysdata/csv/csv_futures_contracts.py | 2 - sysdata/csv/csv_historic_orders.py | 3 - sysdata/csv/csv_instrument_data.py | 2 - sysdata/csv/csv_multiple_prices.py | 3 - sysdata/csv/csv_optimal_position.py | 1 - sysdata/csv/csv_roll_calendars.py | 2 - sysdata/csv/csv_roll_parameters.py | 2 - sysdata/csv/csv_roll_state_storage.py | 1 - sysdata/csv/csv_spread_costs.py | 1 - sysdata/csv/csv_spreads.py | 2 - sysdata/csv/csv_strategy_position_data.py | 1 - sysdata/data_blob.py | 25 +- sysdata/futures/contracts.py | 3 - .../futures/futures_per_contract_prices.py | 8 - sysdata/futures/roll_calendars.py | 1 - sysdata/futures/rolls_parameters.py | 1 - sysdata/mongodb/mongo_IB_client_id.py | 1 - sysdata/mongodb/mongo_connection.py | 5 - sysdata/mongodb/mongo_futures_contracts.py | 5 - sysdata/mongodb/mongo_generic.py | 4 - sysdata/mongodb/mongo_historic_orders.py | 3 - sysdata/mongodb/mongo_lock_data.py | 1 - sysdata/mongodb/mongo_margin.py | 1 - sysdata/mongodb/mongo_override.py | 1 - sysdata/mongodb/mongo_position_limits.py | 2 - sysdata/mongodb/mongo_process_control.py | 1 - sysdata/mongodb/mongo_roll_state_storage.py | 1 - sysdata/mongodb/mongo_temporary_close.py | 1 - sysdata/mongodb/mongo_temporary_override.py | 1 - sysdata/mongodb/mongo_trade_limits.py | 3 - sysdata/mongodb/tests/test_mongodb.py | 1 - sysdata/parquet/parquet_access.py | 57 +++-- sysdata/parquet/parquet_adjusted_prices.py | 25 +- sysdata/parquet/parquet_capital.py | 33 ++- .../parquet_futures_per_contract_prices.py | 39 ++- .../parquet_historic_contract_positions.py | 36 ++- .../parquet_historic_strategy_positions.py | 27 ++- sysdata/parquet/parquet_multiple_prices.py | 25 +- sysdata/parquet/parquet_optimal_positions.py | 26 +- sysdata/parquet/parquet_spotfx_prices.py | 26 +- sysdata/parquet/parquet_spreads.py | 25 +- sysdata/production/broker_client_id.py | 2 - sysdata/production/capital.py | 8 - .../production/historic_contract_positions.py | 3 - sysdata/production/historic_orders.py | 2 - .../production/historic_strategy_positions.py | 6 - sysdata/production/margin.py | 1 - sysdata/production/optimal_positions.py | 8 - sysdata/production/position_limits.py | 4 - sysdata/production/process_control_data.py | 2 - sysdata/production/trade_limits.py | 2 - sysdata/sim/csv_futures_sim_data.py | 1 - sysdata/sim/csv_futures_sim_test_data.py | 1 - sysdata/sim/db_futures_sim_data.py | 11 +- sysdata/sim/sim_data.py | 1 - sysdata/tools/cleaner.py | 2 - sysdata/tools/manual_price_checker.py | 1 - sysexecution/algos/algo.py | 2 - sysexecution/algos/algo_adaptive.py | 1 - sysexecution/algos/algo_original_best.py | 8 - sysexecution/algos/allocate_algo_to_order.py | 2 - sysexecution/algos/common_functions.py | 3 - .../order_stacks/broker_order_stack.py | 1 - .../order_stacks/contract_order_stack.py | 1 - .../order_stacks/instrument_order_stack.py | 1 - sysexecution/order_stacks/order_stack.py | 4 - sysexecution/orders/base_orders.py | 3 - sysexecution/orders/broker_orders.py | 2 - sysexecution/orders/contract_orders.py | 1 - sysexecution/orders/list_of_orders.py | 1 - .../stack_handler/additional_sampling.py | 2 - sysexecution/stack_handler/balance_trades.py | 2 - .../stack_handler/cancel_and_modify.py | 2 - sysexecution/stack_handler/checks.py | 1 - .../stack_handler/completed_orders.py | 5 - ...eate_broker_orders_from_contract_orders.py | 8 - sysexecution/stack_handler/fills.py | 7 - sysexecution/stack_handler/roll_orders.py | 23 +- .../spawn_children_from_instrument_orders.py | 4 - .../stack_handler/stackHandlerCore.py | 4 - .../strategies/classic_buffered_positions.py | 2 - .../strategies/dynamic_optimised_positions.py | 18 -- .../strategies/strategy_order_handling.py | 3 - sysexecution/tick_data.py | 3 - sysinit/configtools/csvweights_to_yaml.py | 1 - .../adjustedprices_from_db_multiple_to_db.py | 26 +- .../build_multiple_prices_from_raw_data.py | 6 - sysinit/futures/build_roll_calendars.py | 16 +- sysinit/futures/clone_data_for_instrument.py | 8 - .../contract_prices_from_csv_to_arctic.py | 1 + sysinit/futures/create_hourly_and_daily.py | 1 + ...ultiple_and_adjusted_from_csv_to_arctic.py | 1 + ..._from_db_prices_and_csv_calendars_to_db.py | 46 ++-- sysinit/futures/repocsv_spread_costs.py | 3 - .../rollcalendars_from_arcticprices_to_csv.py | 7 +- .../futures/safely_modify_roll_parameters.py | 18 +- sysinit/futures/seed_price_data_from_IB.py | 1 - ...spotfx_from_csvAndInvestingDotCom_to_db.py | 5 +- sysinit/futures/strategy_transfer.py | 2 - sysinit/futures/tests/test_sysinit_futures.py | 1 - sysinit/transfer/backup_arctic_to_parquet.py | 229 +++++++++--------- syslogdiag/email_via_db_interface.py | 1 - syslogdiag/log_entry.py | 1 - syslogdiag/mongo_email_control.py | 2 - syslogdiag/pst_logger.py | 1 - syslogging/server.py | 1 - sysobjects/adjusted_prices.py | 1 - sysobjects/contract_dates_and_expiries.py | 2 - sysobjects/contracts.py | 3 - .../dict_of_futures_per_contract_prices.py | 2 - ...ct_of_named_futures_per_contract_prices.py | 2 - sysobjects/fills.py | 2 - sysobjects/instruments.py | 3 - sysobjects/multiple_prices.py | 2 - sysobjects/production/backtest_storage.py | 1 - sysobjects/production/capital.py | 1 - sysobjects/production/optimal_positions.py | 3 - sysobjects/production/override.py | 1 - sysobjects/production/position_limits.py | 1 - sysobjects/production/positions.py | 2 - sysobjects/production/trade_limits.py | 1 - sysobjects/production/tradeable_object.py | 1 - ...on_of_weekly_and_specific_trading_hours.py | 4 - .../weekly_trading_hours_any_day.py | 2 - sysobjects/roll_calendars.py | 1 - sysobjects/roll_parameters_with_price_data.py | 3 - sysobjects/rolls.py | 4 - sysobjects/spot_fx_prices.py | 1 - sysproduction/backup_db_to_csv.py | 31 +-- sysproduction/data/backtest.py | 1 - sysproduction/data/broker.py | 7 - sysproduction/data/capital.py | 8 +- sysproduction/data/contracts.py | 17 +- sysproduction/data/control_process.py | 7 +- sysproduction/data/controls.py | 13 - sysproduction/data/currency_data.py | 1 + sysproduction/data/generic_production_data.py | 1 - sysproduction/data/instruments.py | 17 +- sysproduction/data/optimal_positions.py | 17 +- sysproduction/data/orders.py | 15 +- sysproduction/data/positions.py | 30 ++- sysproduction/data/prices.py | 17 +- sysproduction/data/production_data_objects.py | 28 +-- sysproduction/data/reports.py | 3 - sysproduction/data/risk.py | 3 - sysproduction/data/sim_data.py | 13 +- sysproduction/data/volumes.py | 6 +- sysproduction/interactive_controls.py | 10 - sysproduction/interactive_diagnostics.py | 3 - ...eractive_manual_check_historical_prices.py | 1 - sysproduction/interactive_order_stack.py | 3 - .../interactive_update_capital_manual.py | 2 - .../interactive_update_roll_status.py | 15 -- sysproduction/linux/scripts/run.py | 1 - .../reporting/account_curve_report.py | 3 - .../reporting/adhoc/dynamic_optimisation.py | 9 - .../reporting/adhoc/instrument_list.py | 2 - .../reporting/adhoc/static_system.py | 2 - .../reporting/adhoc/trading_rule_pandl.py | 2 - sysproduction/reporting/api.py | 13 - sysproduction/reporting/costs_report.py | 1 - sysproduction/reporting/data/costs.py | 6 - .../data/duplicate_remove_markets.py | 2 - sysproduction/reporting/data/pandl.py | 3 - sysproduction/reporting/data/positions.py | 1 - sysproduction/reporting/data/pricechanges.py | 4 - sysproduction/reporting/data/risk.py | 24 -- sysproduction/reporting/data/rolls.py | 6 - sysproduction/reporting/data/trades.py | 3 - sysproduction/reporting/data/volume.py | 1 - .../reporting/duplicate_market_report.py | 1 - sysproduction/reporting/formatting.py | 2 - .../reporting/instrument_risk_report.py | 1 - .../reporting/market_monitor_report.py | 2 - .../reporting/minimum_capital_report.py | 1 - .../reporting/remove_markets_report.py | 1 - .../reporting/reporting_functions.py | 4 - sysproduction/reporting/slippage_report.py | 1 - sysproduction/reporting/strategies_report.py | 2 - .../strategy_code/report_system_classic.py | 1 - .../report_system_dynamic_optimised.py | 2 - .../run_dynamic_optimised_system.py | 5 - .../strategy_code/run_system_classic.py | 3 - sysproduction/update_historical_prices.py | 3 - .../update_multiple_adjusted_prices.py | 2 - sysproduction/update_sampled_contracts.py | 7 - sysproduction/update_strategy_capital.py | 1 - sysproduction/update_strategy_orders.py | 1 - .../estimators/clustering_correlations.py | 3 - sysquant/estimators/correlation_estimator.py | 1 - sysquant/estimators/correlation_over_time.py | 2 - sysquant/estimators/correlations.py | 5 - sysquant/estimators/covariance.py | 3 - .../estimators/diversification_multipliers.py | 3 +- sysquant/estimators/estimates.py | 4 - .../estimators/exponential_correlation.py | 7 +- sysquant/estimators/generic_estimator.py | 1 - sysquant/estimators/mean_estimator.py | 5 - sysquant/estimators/pooled_correlation.py | 1 - sysquant/estimators/stdev_estimator.py | 3 - sysquant/estimators/vol.py | 2 - sysquant/fitting_dates.py | 2 - sysquant/optimisation/SR_adjustment.py | 6 - sysquant/optimisation/cleaning.py | 2 - sysquant/optimisation/full_handcrafting.py | 2 - sysquant/optimisation/generic_optimiser.py | 1 - sysquant/optimisation/optimise_over_time.py | 1 - .../optimisation/optimisers/call_optimiser.py | 2 - .../optimisation/optimisers/equal_weights.py | 1 - sysquant/optimisation/optimisers/handcraft.py | 7 - .../optimisation/optimisers/one_period.py | 1 - sysquant/optimisation/optimisers/shrinkage.py | 1 - sysquant/optimisation/portfolio_optimiser.py | 5 - sysquant/optimisation/pre_processing.py | 7 - sysquant/optimisation/shared.py | 2 - sysquant/portfolio_risk.py | 3 - sysquant/returns.py | 5 - .../accounts/account_buffering_subsystem.py | 1 - systems/accounts/account_buffering_system.py | 1 - systems/accounts/account_costs.py | 5 - systems/accounts/account_forecast.py | 8 - systems/accounts/account_inputs.py | 3 - systems/accounts/account_instruments.py | 3 - systems/accounts/account_subsystem.py | 6 - systems/accounts/account_trading_rules.py | 7 - systems/accounts/account_with_multiplier.py | 1 - systems/accounts/curves/account_curve.py | 2 - .../accounts/curves/account_curve_group.py | 2 - .../curves/nested_account_curve_group.py | 1 - systems/accounts/curves/stats_dict.py | 6 - .../account_curve_order_simulator.py | 3 - .../order_simulator/fills_and_orders.py | 2 - .../order_simulator/hourly_limit_orders.py | 1 - .../order_simulator/hourly_market_orders.py | 1 - .../order_simulator/pandl_order_simulator.py | 3 - .../pandl_calculators/pandl_calculation.py | 5 - .../pandl_calculation_dict.py | 2 - .../pandl_calculators/pandl_cash_costs.py | 1 - .../pandl_calculators/pandl_generic_costs.py | 1 - .../pandl_calculators/pandl_using_fills.py | 2 - systems/basesystem.py | 5 - systems/buffering.py | 3 - systems/diagoutput.py | 2 - systems/forecast_combine.py | 6 - systems/forecasting.py | 1 - systems/portfolio.py | 17 -- systems/positionsizing.py | 1 - .../vol_attenuation_forecast_scale_cap.py | 1 - systems/provided/basic/system.py | 3 - .../accounts_stage.py | 3 - .../buffering.py | 3 - .../optimisation.py | 2 - .../optimised_positions_stage.py | 8 - .../set_up_constraints.py | 3 - .../example/daily_with_order_simulation.py | 1 - .../example/hourly_with_order_simulation.py | 1 - .../provided/example/simplesystemconfig.yaml | 2 +- .../futures_chapter15/futuresconfig.yaml | 6 +- systems/provided/rob_system/run_system.py | 1 - .../optimise_small_system.py | 5 - systems/rawdata.py | 1 - systems/risk_overlay.py | 1 - systems/system_cache.py | 2 - systems/tests/test_cache.py | 4 - systems/tests/test_forecast_combine.py | 6 - systems/tests/test_forecast_scale_cap.py | 3 - systems/tests/test_forecasts.py | 3 - systems/tests/test_portfolio.py | 1 - systems/tests/test_rawdata.py | 1 - systems/tests/testfuturesrawdata.py | 1 - systems/tools/autogroup.py | 1 - systems/trading_rules.py | 11 - tests/test_examples.py | 14 -- tox.ini | 2 +- 340 files changed, 582 insertions(+), 1280 deletions(-) diff --git a/dashboard/app.py b/dashboard/app.py index 699d183bab..31344b35fa 100644 --- a/dashboard/app.py +++ b/dashboard/app.py @@ -299,7 +299,6 @@ def trades(): @app.route("/strategy") def strategy(): - return {} diff --git a/data/tools/contract_comparison.py b/data/tools/contract_comparison.py index a210a6778b..235c90528d 100644 --- a/data/tools/contract_comparison.py +++ b/data/tools/contract_comparison.py @@ -1,4 +1,3 @@ - from sysproduction.data.prices import diagPrices from sysobjects.contracts import futuresContract @@ -6,6 +5,7 @@ diag_prices = diagPrices() + class ContractComparison: """Class for comparing futures contracts side by side on different dimensions""" diff --git a/docs/doc_generation/list_of_futures_product_pages_generator.py b/docs/doc_generation/list_of_futures_product_pages_generator.py index 0b9d05fb2d..cb521e78d6 100644 --- a/docs/doc_generation/list_of_futures_product_pages_generator.py +++ b/docs/doc_generation/list_of_futures_product_pages_generator.py @@ -14,7 +14,6 @@ symbols_list.sort() with open(str(write_path / "list_of_futures_product_pages.md"), "w") as f: - f.writelines(f"# List of futures product pages") f.writelines("\n") f.writelines( diff --git a/examples/introduction/asimpletradingrule.py b/examples/introduction/asimpletradingrule.py index deef6bb544..5d78ee8e85 100644 --- a/examples/introduction/asimpletradingrule.py +++ b/examples/introduction/asimpletradingrule.py @@ -114,7 +114,6 @@ def calc_ewmac_forecast(price, Lfast, Lslow=None): from systems.accounts.account_forecast import pandl_for_instrument_forecast account = pandl_for_instrument_forecast(forecast=ewmac, price=price) -account2 = pandl_for_instrument_forecast(forecast=ewmac, price=price) account.curve() diff --git a/examples/introduction/prebakedsystems.py b/examples/introduction/prebakedsystems.py index 364dd39e84..6e062e09f3 100644 --- a/examples/introduction/prebakedsystems.py +++ b/examples/introduction/prebakedsystems.py @@ -2,7 +2,7 @@ my_system = simplesystem() print(my_system) -print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) +print(my_system.portfolio.get_notional_position("SOFR").tail(5)) from sysdata.sim.csv_futures_sim_data import csvFuturesSimData from sysdata.config.configdata import Config @@ -14,7 +14,7 @@ my_config = Config("systems.provided.example.simplesystemconfig.yaml") my_data = csvFuturesSimData() my_system = simplesystem(config=my_config, data=my_data) -print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) +print(my_system.portfolio.get_notional_position("SOFR").tail(5)) """ Let's get the chapter 15 system """ diff --git a/examples/production/example_of_custom_run_system.py b/examples/production/example_of_custom_run_system.py index 49da96a7b8..eff71bfbb6 100644 --- a/examples/production/example_of_custom_run_system.py +++ b/examples/production/example_of_custom_run_system.py @@ -13,7 +13,6 @@ class runMySystemCarryTrendDynamic(runSystemCarryTrendDynamic): - # DO NOT CHANGE THE NAME OF THIS FUNCTION; IT IS HARDCODED INTO CONFIGURATION FILES # BECAUSE IT IS ALSO USED TO LOAD BACKTESTS def system_method( @@ -40,7 +39,6 @@ def production_carry_trend_dynamic_system( notional_trading_capital: float = arg_not_supplied, base_currency: str = arg_not_supplied, ) -> System: - sim_data = get_sim_data_object_for_production(data) config = Config(config_filename) @@ -76,7 +74,6 @@ def production_carry_trend_dynamic_system( def futures_system(data, config): - system = System( [ Risk(), diff --git a/pyproject.toml b/pyproject.toml index dd348d629d..937e6ddb48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,5 +49,5 @@ testpaths = [ [tool.black] line-length = 88 -target-version = ['py38'] -required-version = '22.12.0' +target-version = ['py310'] +required-version = '23.11.0' diff --git a/requirements.txt b/requirements.txt index 6d381481d0..3c81597598 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ -pandas==1.0.5 +pandas==2.1.3 matplotlib>=3.0.0 -pyyaml==5.4 -numpy>=1.19.4,<1.24.0 +pyyaml==5.3.1 +numpy>=1.24.0 scipy>=1.0.0 pymongo==3.11.3 arctic==1.79.2 @@ -10,6 +10,6 @@ psutil==5.6.6 pytest>6.2 Flask>=2.0.1 Werkzeug>=2.0.1 -statsmodels==0.13.0 +statsmodels==0.14.0 PyPDF2>=2.5.0 pyarrow>=14.0.1 \ No newline at end of file diff --git a/setup.py b/setup.py index 454f4b819a..0480bb26bc 100755 --- a/setup.py +++ b/setup.py @@ -5,17 +5,10 @@ from setuptools import setup, find_packages from distutils.version import StrictVersion -if StrictVersion(platform.python_version()) <= StrictVersion("3.7.0"): +if StrictVersion(platform.python_version()) <= StrictVersion("3.10.0"): print("pysystemtrade requires Python 3.7.0 or later. Exiting.", file=sys.stderr) sys.exit(1) -if StrictVersion(platform.python_version()) >= StrictVersion("3.9.0"): - print( - "pysystemtrade requires Python 3.8.* or earlier (pandas issue). Exiting.", - file=sys.stderr, - ) - sys.exit(1) - def read(fname): """Utility function to read the README file.""" @@ -24,7 +17,7 @@ def read(fname): def package_files(directory, extension="yaml"): paths = [] - for (path, directories, filenames) in os.walk(directory): + for path, directories, filenames in os.walk(directory): for filename in filenames: if filename.split(".")[-1] == extension: paths.append(os.path.join("..", path, filename)) @@ -91,11 +84,11 @@ def dir_this_file(): package_data=package_data, long_description=read("README.md"), install_requires=[ - "pandas==1.0.5", + "pandas==2.1.3", "matplotlib>=3.0.0", "ib-insync==0.9.86", - "PyYAML>=5.4", - "numpy>=1.19.4,<1.24.0", + "PyYAML>=5.3", + "numpy>=1.24.0", "scipy>=1.0.0", "pymongo==3.11.3", "arctic==1.79.2", @@ -103,9 +96,9 @@ def dir_this_file(): "pytest>6.2", "Flask>=2.0.1", "Werkzeug>=2.0.1", - "statsmodels==0.12.2", + "statsmodels==0.14.0", "PyPDF2>=2.5.0", - "pyarrow>=14.0.1" + "pyarrow>=14.0.1", ], tests_require=["nose", "flake8"], extras_require=dict(), diff --git a/sysbrokers/IB/client/ib_accounting_client.py b/sysbrokers/IB/client/ib_accounting_client.py index e4ef05fd56..d978ad5ff4 100644 --- a/sysbrokers/IB/client/ib_accounting_client.py +++ b/sysbrokers/IB/client/ib_accounting_client.py @@ -14,7 +14,6 @@ class ibAccountingClient(ibClient): def broker_get_account_value_across_currency( self, account_id: str = arg_not_supplied ) -> listOfCurrencyValues: - list_of_values_per_currency = self._get_named_value_across_currency( named_value="NetLiquidation", account_id=account_id ) @@ -24,7 +23,6 @@ def broker_get_account_value_across_currency( def broker_get_excess_liquidity_value_across_currency( self, account_id: str = arg_not_supplied ) -> listOfCurrencyValues: - list_of_values_per_currency = self._get_named_value_across_currency( named_value="FullExcessLiquidity", account_id=account_id ) @@ -34,7 +32,6 @@ def broker_get_excess_liquidity_value_across_currency( def _get_named_value_across_currency( self, named_value: str, account_id: str = arg_not_supplied ) -> listOfCurrencyValues: - list_of_currencies = self._get_list_of_currencies_for_named_values(named_value) list_of_values_per_currency = list( [ @@ -154,7 +151,6 @@ def _record_cache_update(self): self._account_summary_data_update = datetime.datetime.now() def _ib_get_account_summary_from_broker(self) -> dict: - account_summary_rawdata = self.ib.accountSummary() # Weird format let's clean it up diff --git a/sysbrokers/IB/client/ib_client.py b/sysbrokers/IB/client/ib_client.py index 7180604c8c..561d57bdca 100644 --- a/sysbrokers/IB/client/ib_client.py +++ b/sysbrokers/IB/client/ib_client.py @@ -67,7 +67,6 @@ class ibClient(object): def __init__( self, ibconnection: connectionIB, log: pst_logger = get_logger("ibClient") ): - # means our first call won't be throttled for pacing self.last_historic_price_calltime = ( datetime.datetime.now() @@ -144,7 +143,6 @@ def refresh(self): def get_instrument_code_from_broker_contract_object( self, broker_contract_object: ibContract ) -> str: - broker_identity = self.broker_identity_for_contract(broker_contract_object) instrument_code = self.get_instrument_code_from_broker_identity_for_contract( broker_identity @@ -174,7 +172,6 @@ def ib_config(self) -> IBconfig: return config def _get_and_set_ib_config_from_file(self) -> IBconfig: - config_data = read_ib_config_from_file(log=self.log) return config_data @@ -183,7 +180,6 @@ def broker_identity_for_contract( self, ib_contract_pattern: ibContract, ) -> IBInstrumentIdentity: - contract_details = self.get_contract_details( ib_contract_pattern=ib_contract_pattern, allow_expired=False, @@ -203,7 +199,6 @@ def get_contract_details( allow_expired: bool = False, allow_multiple_contracts: bool = False, ) -> Union[ibContractDetails, List[ibContractDetails]]: - contract_details = self._get_contract_details( ib_contract_pattern, allow_expired=allow_expired ) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index a9082047da..1ab12fa74b 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -102,7 +102,6 @@ def ib_get_trading_hours( def _ib_get_uncached_trading_hours( self, contract_object_with_ib_data: futuresContract ) -> listOfTradingHours: - specific_log = contract_object_with_ib_data.specific_log(self.log) try: @@ -157,7 +156,6 @@ def ib_get_trading_hours_from_IB( def ib_get_saved_weekly_trading_hours_for_contract( self, contract_object_with_ib_data: futuresContract ) -> weekdayDictOfListOfTradingHoursAnyDay: - try: weekly_hours_for_timezone = ( self.ib_get_saved_weekly_trading_hours_for_timezone_of_contract( @@ -197,7 +195,6 @@ def ib_get_saved_weekly_trading_hours_for_contract( def ib_get_saved_weekly_trading_hours_custom_for_contract( self, contract_object_with_ib_data: futuresContract ) -> weekdayDictOfListOfTradingHoursAnyDay: - instrument_code = contract_object_with_ib_data.instrument_code all_saved_trading_hours = self.get_all_saved_weekly_trading_hours() specific_weekly_hours_for_contract = all_saved_trading_hours.get( @@ -343,7 +340,6 @@ def ib_futures_contract( always_return_single_leg=False, trade_list_for_multiple_legs: tradeQuantity = None, ) -> Contract: - ibcontract_with_legs = self.ib_futures_contract_with_legs( futures_contract_with_ib_data=futures_contract_with_ib_data, allow_expired=allow_expired, @@ -386,7 +382,6 @@ def _get_stored_or_live_contract( trade_list_for_multiple_legs: tradeQuantity = None, allow_expired: bool = False, ): - ibcontract_with_legs = self.cache.get( self._get_ib_futures_contract_from_broker, contract_object_to_use, @@ -445,7 +440,6 @@ def _get_vanilla_ib_futures_contract_with_legs( contract_date: contractDate, allow_expired: bool = False, ) -> ibcontractWithLegs: - ibcontract = self._get_vanilla_ib_futures_contract( futures_instrument_with_ib_data, contract_date, allow_expired=allow_expired ) diff --git a/sysbrokers/IB/client/ib_fx_client.py b/sysbrokers/IB/client/ib_fx_client.py index 4faf02da55..0f2f9e3b49 100644 --- a/sysbrokers/IB/client/ib_fx_client.py +++ b/sysbrokers/IB/client/ib_fx_client.py @@ -57,7 +57,6 @@ def broker_fx_market_order( def _create_fx_market_order_for_submission( self, trade: float, account_id: str = arg_not_supplied ) -> MarketOrder: - ib_BS_str, ib_qty = resolveBS(trade) ib_order = MarketOrder(ib_BS_str, ib_qty) if account_id is not arg_not_supplied: @@ -93,7 +92,6 @@ def broker_get_daily_fx_data( return fx_data def ib_spotfx_contract(self, ccy1, ccy2="USD") -> Forex: - ibcontract = Forex(ccy1 + ccy2) ibcontract = self.ib_resolve_unique_contract(ibcontract) diff --git a/sysbrokers/IB/client/ib_orders_client.py b/sysbrokers/IB/client/ib_orders_client.py index 1c5fd41b21..0a4c8cd12f 100644 --- a/sysbrokers/IB/client/ib_orders_client.py +++ b/sysbrokers/IB/client/ib_orders_client.py @@ -134,7 +134,6 @@ def _build_ib_order( order_type: brokerOrderType = market_order_type, limit_price: float = None, ) -> ibOrder: - ib_BS_str, ib_qty = resolveBS_for_list(trade_list) if order_type is market_order_type: @@ -199,7 +198,6 @@ def modify_limit_price_given_original_objects( original_contract_object_with_legs: ibcontractWithLegs, new_limit_price: float, ) -> tradeWithContract: - original_contract_object = original_contract_object_with_legs.ibcontract original_order_object.lmtPrice = new_limit_price diff --git a/sysbrokers/IB/client/ib_price_client.py b/sysbrokers/IB/client/ib_price_client.py index 8de70e23b1..a40cca3cff 100644 --- a/sysbrokers/IB/client/ib_price_client.py +++ b/sysbrokers/IB/client/ib_price_client.py @@ -74,7 +74,6 @@ def get_ticker_object_with_BS( contract_object_with_ib_data: futuresContract, trade_list_for_multiple_legs: tradeQuantity = None, ) -> tickerWithBS: - ib_ticker = self.get_ib_ticker_object( contract_object_with_ib_data, trade_list_for_multiple_legs ) @@ -92,7 +91,6 @@ def get_ib_ticker_object( contract_object_with_ib_data: futuresContract, trade_list_for_multiple_legs: tradeQuantity = None, ) -> "ib.ticker": - specific_log = contract_object_with_ib_data.specific_log(self.log) try: @@ -123,7 +121,6 @@ def cancel_market_data_for_contract_and_trade_qty( contract_object_with_ib_data: futuresContract, trade_list_for_multiple_legs: tradeQuantity = None, ): - specific_log = contract_object_with_ib_data.specific_log(self.log) try: @@ -220,7 +217,6 @@ def _get_generic_data_for_contract( def _raw_ib_data_to_df( self, price_data_raw: pd.DataFrame, log: pst_logger ) -> pd.DataFrame: - if price_data_raw is None: log.warning("No price data from IB") raise missingData @@ -254,7 +250,6 @@ def _ib_timestamp_to_datetime(self, timestamp_ib) -> datetime.datetime: return adjusted_ts def _adjust_ib_time_to_local(self, timestamp_ib) -> datetime.datetime: - if getattr(timestamp_ib, "tz_localize", None) is None: # daily, nothing to do return timestamp_ib @@ -308,7 +303,6 @@ def _ib_get_historical_data_of_duration_and_barSize( def _get_barsize_and_duration_from_frequency(bar_freq: Frequency) -> (str, str): - barsize_lookup = dict( [ (Frequency.Day, "1 day"), diff --git a/sysbrokers/IB/config/ib_fx_config.py b/sysbrokers/IB/config/ib_fx_config.py index b7fcce818e..c5043fcb4a 100644 --- a/sysbrokers/IB/config/ib_fx_config.py +++ b/sysbrokers/IB/config/ib_fx_config.py @@ -35,7 +35,6 @@ def config_info_for_code(config_data: pd.DataFrame, currency_code) -> ibFXConfig def get_list_of_codes(config_data: pd.DataFrame) -> list: - list_of_codes = list(config_data.CODE) return list_of_codes diff --git a/sysbrokers/IB/config/ib_instrument_config.py b/sysbrokers/IB/config/ib_instrument_config.py index d196450628..bb93517f49 100644 --- a/sysbrokers/IB/config/ib_instrument_config.py +++ b/sysbrokers/IB/config/ib_instrument_config.py @@ -35,7 +35,6 @@ def read_ib_config_from_file(log: pst_logger = get_logger("")) -> IBconfig: def get_instrument_object_from_config( instrument_code: str, config: IBconfig = None, log: pst_logger = get_logger("") ) -> futuresInstrumentWithIBConfigData: - new_log = log.setup(instrument_code=instrument_code) if config is None: @@ -67,7 +66,6 @@ def get_instrument_object_from_config( def _get_instrument_object_from_valid_config( instrument_code: str, config: IBconfig = None ) -> futuresInstrumentWithIBConfigData: - config_row = config[config.Instrument == instrument_code] symbol = config_row.IBSymbol.values[0] exchange = config_row.IBExchange.values[0] @@ -113,7 +111,6 @@ def get_instrument_code_from_broker_instrument_identity( ib_instrument_identity: IBInstrumentIdentity, log: pst_logger = get_logger(""), ) -> str: - ib_code = ib_instrument_identity.ib_code ib_multiplier = ib_instrument_identity.ib_multiplier ib_exchange = ib_instrument_identity.ib_exchange @@ -148,7 +145,6 @@ def get_instrument_code_from_broker_instrument_identity( raise Exception(msg) if len(config_rows) > 1: - msg = ( "Broker symbol %s (%s, %f) appears more than once in configuration file!" % (ib_code, ib_exchange, ib_multiplier) @@ -162,7 +158,6 @@ def get_instrument_code_from_broker_instrument_identity( def _get_relevant_config_rows_from_broker_instrument_identity_using_multiple_valid_exchanges( config: IBconfig, ib_instrument_identity: IBInstrumentIdentity ) -> pd.Series: - ib_code = ib_instrument_identity.ib_code ib_multiplier = ib_instrument_identity.ib_multiplier ib_valid_exchange = ib_instrument_identity.ib_valid_exchange @@ -187,7 +182,6 @@ def _get_relevant_config_rows_from_broker_instrument_identity_using_multiple_val def _get_relevant_config_rows_from_broker_instrument_identity_fields( config: IBconfig, ib_code: str, ib_multiplier: float, ib_exchange: str ) -> pd.Series: - config_rows = config[ (config.IBSymbol == ib_code) & (config.IBMultiplier == ib_multiplier) diff --git a/sysbrokers/IB/ib_Fx_prices_data.py b/sysbrokers/IB/ib_Fx_prices_data.py index 7da66b699c..2bb93de4b9 100644 --- a/sysbrokers/IB/ib_Fx_prices_data.py +++ b/sysbrokers/IB/ib_Fx_prices_data.py @@ -99,7 +99,6 @@ def _get_raw_fx_prices(self, ib_config_for_code: ibFXConfig) -> pd.Series: return raw_fx_prices_as_series def _get_config_info_for_code(self, currency_code: str) -> ibFXConfig: - try: config_data = self._get_ib_fx_config() except missingFile as e: @@ -124,7 +123,6 @@ def _get_ib_fx_config(self) -> pd.DataFrame: return config def _get_and_set_ib_config_from_file(self) -> pd.DataFrame: - config_data = get_ib_config_from_file(log=self.log) self._config = config_data diff --git a/sysbrokers/IB/ib_contract_position_data.py b/sysbrokers/IB/ib_contract_position_data.py index b9bc58bd45..e581fd5f20 100644 --- a/sysbrokers/IB/ib_contract_position_data.py +++ b/sysbrokers/IB/ib_contract_position_data.py @@ -51,7 +51,6 @@ def futures_instrument_data(self) -> ibFuturesInstrumentData: def get_all_current_positions_as_list_with_contract_objects( self, account_id=arg_not_supplied ) -> listOfContractPositions: - all_positions = self._get_all_futures_positions_as_raw_list( account_id=account_id ) @@ -90,7 +89,6 @@ def _get_contract_position_for_raw_entry(self, position_entry) -> contractPositi return contract_position_object def _get_instrument_code_from_ib_position_entry(self, position_entry) -> str: - ib_contract = position_entry["ib_contract"] instrument_code = self.futures_instrument_data.get_instrument_code_from_broker_contract_object( ib_contract diff --git a/sysbrokers/IB/ib_contracts.py b/sysbrokers/IB/ib_contracts.py index 4ade81a76d..ac732a0ce8 100644 --- a/sysbrokers/IB/ib_contracts.py +++ b/sysbrokers/IB/ib_contracts.py @@ -24,7 +24,6 @@ def resolve_multiple_expiries( ibcontract_list: list, futures_instrument_with_ib_data: futuresInstrumentWithIBConfigData, ) -> ibContract: - code = futures_instrument_with_ib_data.instrument_code ib_data = futures_instrument_with_ib_data.ib_data ignore_weekly = ib_data.ignoreWeekly @@ -58,7 +57,6 @@ def resolve_multiple_expiries_for_EUREX(ibcontract_list: list) -> ibContract: def resolve_multiple_expiries_for_VIX(ibcontract_list: list) -> ibContract: - # Get the symbols resolved_contract = resolve_multiple_expiries_for_generic_futures( ibcontract_list=ibcontract_list, is_monthly_function=_is_vix_symbol_monthly @@ -70,7 +68,6 @@ def resolve_multiple_expiries_for_VIX(ibcontract_list: list) -> ibContract: def resolve_multiple_expiries_for_generic_futures( ibcontract_list: list, is_monthly_function: Callable ) -> ibContract: - # Get the symbols contract_symbols = [ibcontract.localSymbol for ibcontract in ibcontract_list] @@ -128,7 +125,6 @@ def get_ib_contract_with_specific_expiry( futures_instrument_with_ib_data: futuresInstrumentWithIBConfigData, contract_date: contractDate, ) -> Contract: - ibcontract = ib_futures_instrument(futures_instrument_with_ib_data) contract_date_string = str(contract_date.date_str) @@ -172,7 +168,6 @@ def _add_legs_to_ib_contract( trade_list_for_multiple_legs: tradeQuantity, resolved_legs: list, ) -> ibcontractWithLegs: - ratio_list = list_of_ints_with_highest_common_factor_positive_first( trade_list_for_multiple_legs ) diff --git a/sysbrokers/IB/ib_futures_contract_price_data.py b/sysbrokers/IB/ib_futures_contract_price_data.py index ff9ba6de61..ed76044895 100644 --- a/sysbrokers/IB/ib_futures_contract_price_data.py +++ b/sysbrokers/IB/ib_futures_contract_price_data.py @@ -118,7 +118,6 @@ def get_list_of_instrument_codes_with_merged_price_data(self) -> list: def contracts_with_merged_price_data_for_instrument_code( self, instrument_code: str, allow_expired=True ) -> listOfFuturesContracts: - futures_instrument_with_ib_data = ( self.futures_instrument_data.get_futures_instrument_object_with_IB_data( instrument_code @@ -142,7 +141,6 @@ def get_contracts_with_merged_price_data(self): def get_prices_at_frequency_for_potentially_expired_contract_object( self, contract: futuresContract, freq: Frequency = DAILY_PRICE_FREQ ) -> futuresContractPrices: - price_data = self._get_prices_at_frequency_for_contract_object_no_checking_with_expiry_flag( contract, frequency=freq, allow_expired=True ) @@ -159,7 +157,6 @@ def get_prices_at_frequency_for_contract_object( frequency: Frequency, return_empty: bool = True, ): - ## Override this because don't want to check for existing data first try: @@ -177,7 +174,6 @@ def get_prices_at_frequency_for_contract_object( def _get_prices_at_frequency_for_contract_object_no_checking( self, futures_contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - return self._get_prices_at_frequency_for_contract_object_no_checking_with_expiry_flag( futures_contract_object=futures_contract_object, frequency=frequency, @@ -190,7 +186,6 @@ def _get_prices_at_frequency_for_contract_object_no_checking_with_expiry_flag( frequency: Frequency, allow_expired: bool = False, ) -> futuresContractPrices: - """ Get historical prices at a particular frequency @@ -227,7 +222,6 @@ def _get_prices_at_frequency_for_ibcontract_object_no_checking( freq: Frequency, allow_expired: bool = False, ) -> futuresContractPrices: - new_log = contract_object_with_ib_broker_config.log(self.log) try: diff --git a/sysbrokers/IB/ib_futures_contracts_data.py b/sysbrokers/IB/ib_futures_contracts_data.py index a1a5a09a51..d9ab4931c6 100644 --- a/sysbrokers/IB/ib_futures_contracts_data.py +++ b/sysbrokers/IB/ib_futures_contracts_data.py @@ -130,7 +130,6 @@ def _get_actual_expiry_date_given_single_contract_with_ib_metadata( def _get_contract_object_with_IB_metadata( self, contract_object: futuresContract ) -> futuresContract: - try: futures_instrument_with_ib_data = ( self._get_futures_instrument_object_with_IB_data( diff --git a/sysbrokers/IB/ib_fx_handling.py b/sysbrokers/IB/ib_fx_handling.py index fa2e4da979..71f75d6469 100644 --- a/sysbrokers/IB/ib_fx_handling.py +++ b/sysbrokers/IB/ib_fx_handling.py @@ -46,7 +46,6 @@ def broker_fx_market_order( account_id: str = arg_not_supplied, ccy2: str = "USD", ) -> tradeWithContract: - submitted_fx_trade = self.ib_client.broker_fx_market_order( trade, ccy1, account_id=account_id, ccy2=ccy2 ) diff --git a/sysbrokers/IB/ib_instruments.py b/sysbrokers/IB/ib_instruments.py index 991d232491..4120e9ec92 100644 --- a/sysbrokers/IB/ib_instruments.py +++ b/sysbrokers/IB/ib_instruments.py @@ -89,7 +89,6 @@ def ib_futures_instrument( if ib_data.ibMultiplier is NOT_REQUIRED_FOR_IB: pass else: - ibcontract.multiplier = _resolve_multiplier(ib_data.ibMultiplier) if ib_data.currency is NOT_REQUIRED_FOR_IB: diff --git a/sysbrokers/IB/ib_instruments_data.py b/sysbrokers/IB/ib_instruments_data.py index 8077bfa01e..c5598f34a4 100644 --- a/sysbrokers/IB/ib_instruments_data.py +++ b/sysbrokers/IB/ib_instruments_data.py @@ -49,7 +49,6 @@ def _get_instrument_data_without_checking(self, instrument_code: str): def get_futures_instrument_object_with_IB_data( self, instrument_code: str ) -> futuresInstrumentWithIBConfigData: - config = self.ib_config instrument_object = get_instrument_object_from_config( instrument_code, log=self.log, config=config @@ -101,7 +100,6 @@ def ibconnection(self) -> connectionIB: return self._ibconnection def _get_and_set_ib_config_from_file(self) -> IBconfig: - config_data = read_ib_config_from_file(log=self.log) return config_data diff --git a/sysbrokers/IB/ib_orders.py b/sysbrokers/IB/ib_orders.py index c97586e977..aae8d5926c 100644 --- a/sysbrokers/IB/ib_orders.py +++ b/sysbrokers/IB/ib_orders.py @@ -40,7 +40,6 @@ def __init__( instrument_code: str = None, ticker_object: tickerObject = None, ): - if broker_order is None: # This might happen if for example we are getting the orders from # IB @@ -376,7 +375,6 @@ def match_db_broker_order_to_control_order_from_brokers( return matched_control_order def cancel_order_on_stack(self, broker_order: brokerOrder): - log = broker_order.log_with_attributes(self.log) matched_control_order = ( self.match_db_broker_order_to_control_order_from_brokers(broker_order) @@ -480,7 +478,6 @@ def get_status_for_control_object( def add_trade_info_to_broker_order( broker_order: brokerOrder, broker_order_from_trade_object: ibBrokerOrder ) -> brokerOrder: - new_broker_order = copy(broker_order) keys_to_replace = [ "broker_permid", @@ -532,7 +529,6 @@ def match_control_order_on_permid( def match_control_order_from_dict( dict_of_broker_control_orders: dict, broker_order_to_match: brokerOrder ): - matched_control_order_from_dict = dict_of_broker_control_orders.get( broker_order_to_match.broker_tempid, missing_order ) diff --git a/sysbrokers/IB/ib_positions.py b/sysbrokers/IB/ib_positions.py index e6a54e02c8..2528794405 100644 --- a/sysbrokers/IB/ib_positions.py +++ b/sysbrokers/IB/ib_positions.py @@ -87,7 +87,6 @@ def from_ib_positions_to_dict( def resolve_ib_stock_position(position): - return dict( account=position.account, symbol=position.contract.symbol, @@ -101,7 +100,6 @@ def resolve_ib_stock_position(position): def resolve_ib_future_position(position): - return dict( account=position.account, symbol=position.contract.symbol, @@ -114,7 +112,6 @@ def resolve_ib_future_position(position): def resolve_ib_cash_position(position): - return dict( account=position.account, symbol=position.contract.localSymbol, diff --git a/sysbrokers/IB/ib_trading_hours.py b/sysbrokers/IB/ib_trading_hours.py index 6e76bf4dd4..256ea40c9c 100644 --- a/sysbrokers/IB/ib_trading_hours.py +++ b/sysbrokers/IB/ib_trading_hours.py @@ -48,7 +48,6 @@ def parse_trading_hours_string( trading_hours_string: str, adjustment_hours: int = 0, ) -> listOfTradingHours: - day_by_day = trading_hours_string.split(";") list_of_open_times = [ parse_trading_for_day(string_for_day, adjustment_hours=adjustment_hours) @@ -67,7 +66,6 @@ def parse_trading_hours_string( def parse_trading_for_day( string_for_day: str, adjustment_hours: int = 0 ) -> tradingHours: - start_and_end = string_for_day.split("-") if len(start_and_end) == 1: # closed diff --git a/sysbrokers/broker_capital_data.py b/sysbrokers/broker_capital_data.py index a6c5470981..aa3cbae8ff 100644 --- a/sysbrokers/broker_capital_data.py +++ b/sysbrokers/broker_capital_data.py @@ -11,7 +11,6 @@ class brokerCapitalData(capitalData): def __init__( self, data: dataBlob, log: pst_logger = get_logger("brokerCapitalData") ): - super().__init__(log=log) self._data = data diff --git a/sysbrokers/broker_fx_handling.py b/sysbrokers/broker_fx_handling.py index eba30ccf13..b98214e5db 100644 --- a/sysbrokers/broker_fx_handling.py +++ b/sysbrokers/broker_fx_handling.py @@ -22,7 +22,6 @@ def broker_fx_market_order( account_id: str = arg_not_supplied, ccy2: str = "USD", ) -> brokerTrade: - raise NotImplementedError @property diff --git a/syscontrol/run_process.py b/syscontrol/run_process.py index 409ae1085f..37b0ba0662 100644 --- a/syscontrol/run_process.py +++ b/syscontrol/run_process.py @@ -367,7 +367,6 @@ def check_for_pause_and_log(process_to_run: processToRun) -> bool: ## FINISH CODE def _check_for_stop(process_to_run: processToRun) -> bool: - """ - is my process marked as STOP in process control (check database) diff --git a/syscontrol/strategy_tools.py b/syscontrol/strategy_tools.py index 788b7e4333..a71801bbfd 100644 --- a/syscontrol/strategy_tools.py +++ b/syscontrol/strategy_tools.py @@ -44,7 +44,6 @@ def run_strategy_method(self): def get_strategy_method( data: dataBlob, strategy_name: str, process_name: str, function_name: str ): - strategy_class_instance = get_strategy_class_instance( data=data, strategy_name=strategy_name, process_name=process_name ) diff --git a/syscontrol/timer_functions.py b/syscontrol/timer_functions.py index dc289cefd4..4cf4901129 100644 --- a/syscontrol/timer_functions.py +++ b/syscontrol/timer_functions.py @@ -22,7 +22,6 @@ def __init__( parameters: timerClassParameters, log=get_logger(""), ): - self._function = function_to_execute # class.method to run self._data = data self._parameters = parameters @@ -164,7 +163,6 @@ def check_if_okay_to_run_normal_run(self, last_run: bool = False) -> bool: return okay_to_run def check_if_okay_to_run_normal_run_if_not_last_run(self) -> bool: - exceeded_max = self.completed_max_runs() if exceeded_max: return False @@ -179,7 +177,6 @@ def check_if_okay_to_run_normal_run_if_not_last_run(self) -> bool: return False def check_if_enough_time_has_passed_and_report_status(self) -> bool: - enough_time_has_passed = self.check_if_enough_time_has_elapsed_since_last_run() enough_time_has_passed_status = ( "Not enough time has passed since last run of %s in %s" @@ -215,7 +212,6 @@ def minutes_until_next_run(self) -> float: return remaining_minutes def log_heartbeat_if_required(self): - time_since_heartbeat = self.minutes_since_last_heartbeat() if time_since_heartbeat > self.minutes_between_heartbeats: self.log_heartbeat() @@ -342,7 +338,6 @@ def get_list_of_timer_functions( process_name: str, list_of_timer_names_and_functions_as_strings: list, ) -> listOfTimerFunctions: - list_of_timer_functions_as_list = [ _get_timer_class(data, process_name, entry) for entry in list_of_timer_names_and_functions_as_strings diff --git a/syscore/capital.py b/syscore/capital.py index bf101a35ed..ab141c595b 100755 --- a/syscore/capital.py +++ b/syscore/capital.py @@ -33,7 +33,6 @@ def full_compounding(system: System, **ignored_args) -> pd.Series: def half_compounding(system: System, **ignored_args) -> pd.Series: - ## remove any nans pandl = system.accounts.portfolio().percent.curve().ffill().diff() multiplier = 1.0 diff --git a/syscore/dateutils.py b/syscore/dateutils.py index 47707497de..02c35b2bbf 100755 --- a/syscore/dateutils.py +++ b/syscore/dateutils.py @@ -62,7 +62,6 @@ def calculate_start_and_end_dates( start_period: str = arg_not_supplied, end_period: str = arg_not_supplied, ) -> Tuple[datetime.datetime, datetime.datetime]: - resolved_end_date = _resolve_end_date_given_period_and_explicit_end_date( end_date=end_date, end_period=end_period ) @@ -653,7 +652,6 @@ def check_time_matches_closing_time_to_second( and index_entry.minute == closing_time.minutes and index_entry.second == closing_time.seconds ): - return True else: return False @@ -675,7 +673,6 @@ def strip_timezone_fromdatetime(timestamp_with_tz_info) -> datetime.datetime: def generate_equal_dates_within_year( year: int, number_of_dates: int, force_start_year_align: bool = False ) -> List[datetime.datetime]: - """ Generate equally spaced datetimes within a given year >>> generate_equal_dates_within_year(2022,3) @@ -705,7 +702,6 @@ def generate_equal_dates_within_year( def _calculate_first_date_for_equal_dates( year: int, days_between_periods: int, force_start_year_align: bool = False ) -> datetime.datetime: - start_of_year = datetime.datetime(year, 1, 1) if force_start_year_align: diff --git a/syscore/interactive/date_input.py b/syscore/interactive/date_input.py index a5a215a044..db973633fe 100644 --- a/syscore/interactive/date_input.py +++ b/syscore/interactive/date_input.py @@ -10,7 +10,6 @@ def get_report_dates() -> Tuple[datetime.datetime, datetime.datetime]: - end_date = arg_not_supplied start_date = arg_not_supplied start_period = arg_not_supplied @@ -72,7 +71,6 @@ def get_datetime_input( allow_calendar_days: bool = False, allow_period: bool = False, ) -> Union[str, datetime.datetime, int]: - input_str = _create_input_string_for_datetime_input( prompt=prompt, allow_default_datetime_of_now=allow_default_datetime_of_now, diff --git a/syscore/interactive/menus.py b/syscore/interactive/menus.py index 2c474df612..d5ba01ee04 100644 --- a/syscore/interactive/menus.py +++ b/syscore/interactive/menus.py @@ -82,7 +82,6 @@ def _propose_options_and_get_input_at_top_level(self): return TRAVERSING_MENU def _propose_options_and_get_input_at_sub_level(self) -> int: - sub_menu = self.current_submenu option_chosen = print_menu_and_get_desired_option_index( sub_menu, default_option_index=EXIT_OPTION, default_str="Back" @@ -136,7 +135,6 @@ def kwargs(self) -> dict: def print_menu_of_values_and_get_response( menu_of_options_as_list: List[str], default_str="" ) -> str: - default_option_index, copy_menu_of_options_as_list = _get_index_of_default_option( menu_of_options_as_list=menu_of_options_as_list, default_str=default_str ) @@ -156,7 +154,6 @@ def print_menu_of_values_and_get_response( def _get_index_of_default_option( menu_of_options_as_list: List[str], default_str="" ) -> Tuple[Union[type(None), int], List[str]]: - copy_menu_of_options_as_list = copy(menu_of_options_as_list) if default_str == "": return None, copy_menu_of_options_as_list @@ -218,7 +215,6 @@ def print_menu_and_get_desired_option_index( def _resolve_default_for_dict_of_menu_options( menu_of_options: dict, default_option_index=None, default_str: str = "" ) -> Tuple[bool, dict, int, str]: - """ >>> _resolve_default_for_dict_of_menu_options({1: 'a', 2: 'b'}, 1) diff --git a/syscore/interactive/progress_bar.py b/syscore/interactive/progress_bar.py index 11844d10e6..887529a21c 100644 --- a/syscore/interactive/progress_bar.py +++ b/syscore/interactive/progress_bar.py @@ -26,7 +26,6 @@ def __init__( show_timings=True, toolbar_width: int = 80, ): - self._start_time = time.time() self._current_iteration = 0 self._suffix = suffix diff --git a/syscore/interactive/run_functions.py b/syscore/interactive/run_functions.py index ee680f973e..24d59960e9 100644 --- a/syscore/interactive/run_functions.py +++ b/syscore/interactive/run_functions.py @@ -25,7 +25,7 @@ def interactively_input_arguments_for_function(func, full_funcname): args = [] kwargs = dict() - for (argname, parameter_signature) in func_arguments.items(): + for argname, parameter_signature in func_arguments.items(): arg_value = input_and_type_cast_argument(argname, parameter_signature) is_kwarg = has_default(parameter_signature) @@ -85,7 +85,6 @@ def has_type(parameter_signature) -> bool: def parameter_type( parameter_signature: inspect.Parameter, ): - ptype = parameter_signature.annotation if ptype is EMPTY_VALUE: # get from default diff --git a/syscore/maths.py b/syscore/maths.py index c65db66477..71bed850ad 100755 --- a/syscore/maths.py +++ b/syscore/maths.py @@ -11,7 +11,6 @@ def calculate_weighted_average_with_nans( weights: list, list_of_values: list, sum_of_weights_should_be: float = 1.0 ) -> float: - """ Calculate a weighted average when the weights and/or values might be nans >>> calculate_weighted_average_with_nans([0.2, 0.2, np.nan, 0.4],[2, np.nan, 3, np.nan]) diff --git a/syscore/pandas/full_merge_with_replacement.py b/syscore/pandas/full_merge_with_replacement.py index 38ce7793c4..df1ce8db84 100644 --- a/syscore/pandas/full_merge_with_replacement.py +++ b/syscore/pandas/full_merge_with_replacement.py @@ -95,7 +95,6 @@ def full_merge_of_data_with_both_old_and_new( new_data: Union[pd.Series, pd.DataFrame], keep_older: bool = True, ) -> Union[pd.Series, pd.DataFrame]: - if is_a_series(old_data): assert is_a_series(new_data) merged_data = full_merge_of_existing_series( diff --git a/syscore/pandas/list_of_df.py b/syscore/pandas/list_of_df.py index 041bc68887..f2e436cbd1 100644 --- a/syscore/pandas/list_of_df.py +++ b/syscore/pandas/list_of_df.py @@ -104,7 +104,7 @@ def stacked_df_with_added_time_from_list(data: listOfDataFrames) -> pd.DataFrame aligned_data = data.reindex_to_common_columns() # add on an offset - for (offset_value, data_item) in enumerate(aligned_data): + for offset_value, data_item in enumerate(aligned_data): data_item.index = data_item.index + pd.Timedelta("%dus" % offset_value) # pooled diff --git a/syscore/pandas/merge_data_keeping_past_data.py b/syscore/pandas/merge_data_keeping_past_data.py index ac5a11742a..7c55269e38 100644 --- a/syscore/pandas/merge_data_keeping_past_data.py +++ b/syscore/pandas/merge_data_keeping_past_data.py @@ -139,7 +139,6 @@ def merge_newer_data_no_checks( def _merge_newer_data_no_checks_if_both_old_and_new( old_data: Union[pd.Series, pd.DataFrame], new_data: Union[pd.Series, pd.DataFrame] ) -> mergingDataWithStatus: - last_date_in_old_data = old_data.index[-1] new_data.sort_index() actually_new_data = new_data[new_data.index > last_date_in_old_data] @@ -168,7 +167,6 @@ def spike_check_merged_data( column_to_check_for_spike: str = arg_not_supplied, max_spike: float = VERY_BIG_NUMBER, ) -> mergingDataWithStatus: - merge_status = merged_data_with_status.status merged_data = merged_data_with_status.merged_data @@ -228,7 +226,6 @@ def _get_data_to_check( merged_data: Union[pd.Series, pd.DataFrame], column_to_check_for_spike: str = arg_not_supplied, ) -> Union[pd.Series, pd.DataFrame]: - if is_a_series(merged_data): # already a series data_to_check = merged_data @@ -244,7 +241,6 @@ def _get_data_to_check( def _calculate_change_in_vol_normalised_units(data_to_check: pd.Series) -> pd.Series: - # Calculate the average change per day change_per_day = _calculate_change_in_daily_units(data_to_check) @@ -312,7 +308,6 @@ def _check_for_spikes_in_change_in_vol_normalised_units( relevant_change_in_vol_normalised_units: pd.Series, max_spike: float = VERY_BIG_NUMBER, ) -> Union[datetime.datetime, named_object]: - if any(relevant_change_in_vol_normalised_units > max_spike): first_spike = relevant_change_in_vol_normalised_units.index[ relevant_change_in_vol_normalised_units > max_spike diff --git a/syscore/pandas/merge_data_with_label_column.py b/syscore/pandas/merge_data_with_label_column.py index 231c9a1eb5..70d7014af8 100644 --- a/syscore/pandas/merge_data_with_label_column.py +++ b/syscore/pandas/merge_data_with_label_column.py @@ -185,7 +185,6 @@ def _find_dates_when_labels_change_given_label_data( existing_labels_in_new_period: pd.Series, new_labels_in_new_period: pd.Series, ) -> Union[named_object, Tuple[datetime.datetime, datetime.datetime]]: - # Find the last date when the labels didn't match, and the first date # after that match_dates = _find_dates_when_series_starts_matching( @@ -254,7 +253,6 @@ def _find_dates_when_series_starts_matching( def _match_dates_for_labels_when_not_equal_or_mismatch( series1: pd.Series, period_equal: List[bool] ) -> Tuple[datetime.datetime, datetime.datetime]: - # Want last False value period_equal.reverse() first_false_in_reversed_list = period_equal.index(False) @@ -280,7 +278,6 @@ def _match_dates_for_labels_when_not_equal_or_mismatch( def _match_dates_when_entire_series_of_labels_matches( original_data: pd.DataFrame, new_data: pd.DataFrame ) -> Union[named_object, Tuple[datetime.datetime, datetime.datetime]]: - # Can use entire series becuase all match if new_data.index[0] == original_data.index[0]: # They are same size, so have to use whole of original data @@ -342,7 +339,6 @@ def _stitch_merged_and_existing_data( data_column="PRICE", label_column="PRICE_CONTRACT", ) -> pd.DataFrame: - labelled_merged_data = _get_labelled_merged_data( merged_data_series=merged_data_series, original_data=original_data, @@ -377,7 +373,6 @@ def _get_labelled_merged_data( data_column="PRICE", label_column="PRICE_CONTRACT", ) -> pd.DataFrame: - labels_in_merged_data = _get_merged_label_data( merged_data_series=merged_data_series, original_data=original_data, diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 82f6166c40..6f1575a8f9 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -16,7 +16,6 @@ def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 ) -> pd.Series: - assert len(x.columns) == 2 rolling_corr_df = x.rolling(periods, min_periods=min_periods).corr() @@ -312,7 +311,6 @@ def apply_with_min_periods( not_nan = sum(~np.isnan(xcol)) if not_nan >= min_periods: - return my_func(xcol) else: return np.nan @@ -332,7 +330,6 @@ def from_series_to_matching_df_frame( def from_series_to_df_with_column_names( pd_series: pd.Series, list_of_columns: list ) -> pd.DataFrame: - new_df = pd.concat([pd_series] * len(list_of_columns), axis=1) new_df.columns = list_of_columns diff --git a/syscore/tests/test_correlation.py b/syscore/tests/test_correlation.py index 9428149cbb..9446ec87f8 100644 --- a/syscore/tests/test_correlation.py +++ b/syscore/tests/test_correlation.py @@ -77,7 +77,6 @@ def testPooling(self): @unittest.SkipTest def testFrequency(self): - self.system.config.forecast_correlation_estimate["frequency"] = "D" self.system.config.forecast_correlation_estimate["floor_at_zero"] = False instrument_code = "US10" diff --git a/sysdata/_DEPRECATED/mongo_log.py b/sysdata/_DEPRECATED/mongo_log.py index 395b436548..ec9f71fc59 100644 --- a/sysdata/_DEPRECATED/mongo_log.py +++ b/sysdata/_DEPRECATED/mongo_log.py @@ -89,7 +89,6 @@ def add_before_n_days_to_attribute_dict( def add_after_n_days_to_attribute_dict( attribute_dict: dict, lookback_days: int ) -> dict: - attribute_dict = add_timestamp_cutoff_to_attribute_dict( attribute_dict=attribute_dict, lookback_days=lookback_days, diff --git a/sysdata/_DEPRECATED/mongo_position_by_contract_TO_DEPRECATE.py b/sysdata/_DEPRECATED/mongo_position_by_contract_TO_DEPRECATE.py index 3237ca5798..27aaf7ec3e 100644 --- a/sysdata/_DEPRECATED/mongo_position_by_contract_TO_DEPRECATE.py +++ b/sysdata/_DEPRECATED/mongo_position_by_contract_TO_DEPRECATE.py @@ -14,7 +14,6 @@ class mongoContractPositionData(object): """ def __init__(self, mongo_db=arg_not_supplied, log=logtoscreen("")): - self._log = log self._mongo_data = mongoDataWithMultipleKeys( POSITION_CONTRACT_COLLECTION, mongo_db=mongo_db diff --git a/sysdata/_DEPRECATED/mongo_timed_storage_TO_DEPRECATE.py b/sysdata/_DEPRECATED/mongo_timed_storage_TO_DEPRECATE.py index 59b772d5de..257eaaf7cc 100644 --- a/sysdata/_DEPRECATED/mongo_timed_storage_TO_DEPRECATE.py +++ b/sysdata/_DEPRECATED/mongo_timed_storage_TO_DEPRECATE.py @@ -25,7 +25,6 @@ def _data_name(self) -> str: def __init__( self, mongo_db=arg_not_supplied, log=logtoscreen("mongoStrategyCapitalData") ): - super().__init__(log=log) self._mongo_data = mongoDataWithMultipleKeys( self._collection_name, mongo_db=mongo_db diff --git a/sysdata/_DEPRECATED/timed_storage_TO_DEPRECATE.py b/sysdata/_DEPRECATED/timed_storage_TO_DEPRECATE.py index 239573817c..725088126a 100644 --- a/sysdata/_DEPRECATED/timed_storage_TO_DEPRECATE.py +++ b/sysdata/_DEPRECATED/timed_storage_TO_DEPRECATE.py @@ -40,7 +40,6 @@ def __init__( self.list_of_entries_as_list_of_dicts = list_of_entries_as_list_of_dicts def with_class_object(self): - class_of_entry_list = resolve_function(self.class_of_entry_list_as_str) return classWithListOfEntriesAsListOfDicts( @@ -82,13 +81,11 @@ def _empty_data_series(self): return empty_entry_series def __init__(self, log=logtoscreen("listOfEntriesData")): - super().__init__(log=log) def _delete_all_data_for_args_dict( self, args_dict: dict, are_you_really_sure: bool = False ): - if not are_you_really_sure: self.log.warn("To delete all data, need to set are_you_really_sure=True") return failure @@ -97,7 +94,6 @@ def _delete_all_data_for_args_dict( self._write_series_for_args_dict(args_dict, empty_entry_series) def _update_entry_for_args_dict(self, new_entry: timedEntry, args_dict: dict): - existing_series = self._get_series_for_args_dict(args_dict) if len(existing_series) > 0: # Check types match @@ -131,7 +127,6 @@ def _update_entry_for_args_dict(self, new_entry: timedEntry, args_dict: dict): def _check_class_name_matches_for_new_entry( self, args_dict: dict, new_entry: timedEntry ): - entry_class_name_new_entry = new_entry.containing_data_class_name entry_class_name_existing = self._get_class_of_entry_list_as_str(args_dict) @@ -184,7 +179,6 @@ def _get_series_for_args_dict(self, args_dict) -> listOfEntries: def _get_series_dict_and_class_for_args_dict( self, args_dict: dict ) -> classWithListOfEntriesAsListOfDicts: - class_str_with_series_as_list_of_dicts = ( self._get_series_dict_with_data_class_for_args_dict(args_dict) ) @@ -220,7 +214,6 @@ def _get_class_of_entry_list_as_str( self, args_dict: dict, ) -> str: - ## Use existing data, or if not available use the default for this object try: class_str_with_series_as_list_of_dicts = ( @@ -234,7 +227,6 @@ def _get_class_of_entry_list_as_str( def _get_series_dict_with_data_class_for_args_dict( self, args_dict: dict ) -> classStrWithListOfEntriesAsListOfDicts: - # return data_class, series_as_list_of_dicts ## return missing_data if unvailable raise NotImplementedError("Need to use child class") diff --git a/sysdata/arctic/arctic_adjusted_prices.py b/sysdata/arctic/arctic_adjusted_prices.py index a46b61a15a..38f76b5b6f 100644 --- a/sysdata/arctic/arctic_adjusted_prices.py +++ b/sysdata/arctic/arctic_adjusted_prices.py @@ -15,7 +15,6 @@ class arcticFuturesAdjustedPricesData(futuresAdjustedPricesData): """ def __init__(self, mongo_db=None, log=get_logger("arcticFuturesAdjustedPrices")): - super().__init__(log=log) self._arctic = arcticData(ADJPRICE_COLLECTION, mongo_db=mongo_db) diff --git a/sysdata/arctic/arctic_capital.py b/sysdata/arctic/arctic_capital.py index fb5e2fd88b..cbd85d2861 100644 --- a/sysdata/arctic/arctic_capital.py +++ b/sysdata/arctic/arctic_capital.py @@ -14,7 +14,6 @@ class arcticCapitalData(capitalData): """ def __init__(self, mongo_db=None, log=get_logger("arcticCapitalData")): - super().__init__(log=log) self._arctic = arcticData(CAPITAL_COLLECTION, mongo_db=mongo_db) @@ -40,7 +39,6 @@ def get_capital_pd_df_for_strategy(self, strategy_name: str) -> pd.DataFrame: return pd_series def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): - self.arctic.delete(strategy_name) def update_capital_pd_df_for_strategy( diff --git a/sysdata/arctic/arctic_connection.py b/sysdata/arctic/arctic_connection.py index 5e295bed5a..918eeebb14 100644 --- a/sysdata/arctic/arctic_connection.py +++ b/sysdata/arctic/arctic_connection.py @@ -17,7 +17,6 @@ class arcticData(object): """ def __init__(self, collection_name, mongo_db=None): - if mongo_db is None: mongo_db = mongoDb() diff --git a/sysdata/arctic/arctic_futures_per_contract_prices.py b/sysdata/arctic/arctic_futures_per_contract_prices.py index 9348797257..25cd11f4ed 100644 --- a/sysdata/arctic/arctic_futures_per_contract_prices.py +++ b/sysdata/arctic/arctic_futures_per_contract_prices.py @@ -25,7 +25,6 @@ class arcticFuturesContractPriceData(futuresContractPriceData): """ def __init__(self, mongo_db=None, log=get_logger("arcticFuturesContractPriceData")): - super().__init__(log=log) self._arctic_connection = arcticData(CONTRACT_COLLECTION, mongo_db=mongo_db) @@ -57,7 +56,6 @@ def _get_merged_prices_for_contract_object_no_checking( def _get_prices_at_frequency_for_contract_object_no_checking( self, futures_contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) @@ -93,7 +91,6 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - log = futures_contract_object.log(self.log) ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency @@ -127,7 +124,6 @@ def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: def get_contracts_with_price_data_for_frequency( self, frequency: Frequency ) -> listOfFuturesContracts: - list_of_contract_and_freq_tuples = ( self._get_contract_and_frequencies_with_price_data() ) @@ -151,7 +147,6 @@ def has_merged_price_data_for_contract( def has_price_data_for_contract_at_frequency( self, contract_object: futuresContract, frequency: Frequency ) -> bool: - return self.arctic_connection.has_keyname( from_contract_and_freq_to_key(contract_object, frequency=frequency) ) diff --git a/sysdata/arctic/arctic_historic_contract_positions.py b/sysdata/arctic/arctic_historic_contract_positions.py index 4b03d853cd..03ab65e29b 100644 --- a/sysdata/arctic/arctic_historic_contract_positions.py +++ b/sysdata/arctic/arctic_historic_contract_positions.py @@ -14,7 +14,6 @@ class arcticContractPositionData(contractPositionData): def __init__(self, mongo_db=None, log=get_logger("arcticContractPositionData")): - super().__init__(log=log) self._arctic = arcticData(CONTRACT_POSITION_COLLECTION, mongo_db=mongo_db) diff --git a/sysdata/arctic/arctic_historic_strategy_positions.py b/sysdata/arctic/arctic_historic_strategy_positions.py index eeff1d1719..286e516927 100644 --- a/sysdata/arctic/arctic_historic_strategy_positions.py +++ b/sysdata/arctic/arctic_historic_strategy_positions.py @@ -16,7 +16,6 @@ class arcticStrategyPositionData(strategyPositionData): def __init__(self, mongo_db=None, log=get_logger("arcticStrategyPositionData")): - super().__init__(log=log) self._arctic = arcticData(STRATEGY_POSITION_COLLECTION, mongo_db=mongo_db) @@ -39,7 +38,6 @@ def get_list_of_instrument_strategies(self) -> listOfInstrumentStrategies: def _write_updated_position_series_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy, updated_series: pd.Series ): - ident = instrument_strategy.key updated_data_as_df = pd.DataFrame(updated_series) updated_data_as_df.columns = ["position"] @@ -55,7 +53,6 @@ def _delete_position_series_for_instrument_strategy_object_without_checking( def get_position_as_series_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy ) -> pd.Series: - keyname = instrument_strategy.key try: pd_df = self.arctic.read(keyname) diff --git a/sysdata/arctic/arctic_multiple_prices.py b/sysdata/arctic/arctic_multiple_prices.py index 1bdafc2413..4782a2301b 100644 --- a/sysdata/arctic/arctic_multiple_prices.py +++ b/sysdata/arctic/arctic_multiple_prices.py @@ -25,7 +25,6 @@ class arcticFuturesMultiplePricesData(futuresMultiplePricesData): def __init__( self, mongo_db=None, log=get_logger("arcticFuturesMultiplePricesData") ): - super().__init__(log=log) self._arctic = arcticData(MULTIPLE_COLLECTION, mongo_db=mongo_db) @@ -50,7 +49,6 @@ def _get_multiple_prices_without_checking( def _delete_multiple_prices_without_any_warning_be_careful( self, instrument_code: str ): - self.arctic.delete(instrument_code) self.log.debug( "Deleted multiple prices for %s from %s" % (instrument_code, str(self)) @@ -59,7 +57,6 @@ def _delete_multiple_prices_without_any_warning_be_careful( def _add_multiple_prices_without_checking_for_existing_entry( self, instrument_code: str, multiple_price_data_object: futuresMultiplePrices ): - multiple_price_data_aspd = pd.DataFrame(multiple_price_data_object) multiple_price_data_aspd = _change_contracts_to_str(multiple_price_data_aspd) diff --git a/sysdata/arctic/arctic_optimal_positions.py b/sysdata/arctic/arctic_optimal_positions.py index 381240aaa7..4e004c489e 100644 --- a/sysdata/arctic/arctic_optimal_positions.py +++ b/sysdata/arctic/arctic_optimal_positions.py @@ -21,7 +21,6 @@ class arcticOptimalPositionData(optimalPositionData): def __init__(self, mongo_db=None, log=get_logger("arcticOptimalPositionData")): - super().__init__(log=log) self._arctic_connection = arcticData( @@ -38,7 +37,6 @@ def arctic_connection(self): def get_list_of_instrument_strategies_with_optimal_position( self, ) -> listOfInstrumentStrategies: - raw_list_of_instrument_strategies = self.arctic_connection.get_keynames() list_of_instrument_strategies = [ instrumentStrategy.from_key(key) @@ -50,7 +48,6 @@ def get_list_of_instrument_strategies_with_optimal_position( def get_optimal_position_as_df_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> pd.DataFrame: - try: ident = instrument_strategy.key df_result = self.arctic_connection.read(ident) diff --git a/sysdata/arctic/arctic_spotfx_prices.py b/sysdata/arctic/arctic_spotfx_prices.py index 5badf78342..10b55b78b1 100644 --- a/sysdata/arctic/arctic_spotfx_prices.py +++ b/sysdata/arctic/arctic_spotfx_prices.py @@ -13,7 +13,6 @@ class arcticFxPricesData(fxPricesData): """ def __init__(self, mongo_db=None, log=get_logger("arcticFxPricesData")): - super().__init__(log=log) self._arctic = arcticData(SPOTFX_COLLECTION, mongo_db=mongo_db) @@ -28,7 +27,6 @@ def get_list_of_fxcodes(self) -> list: return self.arctic.get_keynames() def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: - fx_data = self.arctic.read(currency_code) fx_prices = fxPrices(fx_data[fx_data.columns[0]]) diff --git a/sysdata/arctic/arctic_spreads.py b/sysdata/arctic/arctic_spreads.py index b8a145dc31..8b2c8208c2 100644 --- a/sysdata/arctic/arctic_spreads.py +++ b/sysdata/arctic/arctic_spreads.py @@ -10,7 +10,6 @@ class arcticSpreadsForInstrumentData(spreadsForInstrumentData): def __init__(self, mongo_db=None, log=get_logger("arcticSpreadsForInstrument")): - super().__init__(log=log) self._arctic = arcticData(SPREAD_COLLECTION, mongo_db=mongo_db) diff --git a/sysdata/config/control_config.py b/sysdata/config/control_config.py index 7d8081d74f..72d1f6c150 100644 --- a/sysdata/config/control_config.py +++ b/sysdata/config/control_config.py @@ -7,7 +7,6 @@ def get_control_config() -> Config: - private_control_path = get_full_path_for_private_config(PRIVATE_CONTROL_CONFIG_FILE) try: diff --git a/sysdata/config/instruments.py b/sysdata/config/instruments.py index 1d5a5f60a6..69b9756731 100644 --- a/sysdata/config/instruments.py +++ b/sysdata/config/instruments.py @@ -56,7 +56,6 @@ def generate_matching_duplicate_dict(config: Config): def get_duplicate_dict_entry(key: str, include_dict: dict, exclude_dict: dict) -> dict: - include_entry = get_entry_for_key_in_dict(key, include_dict, is_include_dict=True) exclude_entry = get_entry_for_key_in_dict(key, exclude_dict, is_include_dict=False) @@ -64,7 +63,6 @@ def get_duplicate_dict_entry(key: str, include_dict: dict, exclude_dict: dict) - def get_entry_for_key_in_dict(key: str, check_dict: dict, is_include_dict: bool = True): - if key not in check_dict.keys(): if is_include_dict: print( diff --git a/sysdata/csv/csv_adjusted_prices.py b/sysdata/csv/csv_adjusted_prices.py index 4ff867745f..dc1b2178fc 100644 --- a/sysdata/csv/csv_adjusted_prices.py +++ b/sysdata/csv/csv_adjusted_prices.py @@ -23,7 +23,6 @@ class csvFuturesAdjustedPricesData(futuresAdjustedPricesData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvFuturesContractPriceData") ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -70,7 +69,6 @@ def _delete_adjusted_prices_without_any_warning_be_careful( def _add_adjusted_prices_without_checking_for_existing_entry( self, instrument_code: str, adjusted_price_data: futuresAdjustedPrices ): - # Ensures the file will be written with a column header adjusted_price_data_as_dataframe = pd.DataFrame(adjusted_price_data) adjusted_price_data_as_dataframe.columns = ["price"] diff --git a/sysdata/csv/csv_capital_data.py b/sysdata/csv/csv_capital_data.py index 6499c1f9fb..34d682f341 100644 --- a/sysdata/csv/csv_capital_data.py +++ b/sysdata/csv/csv_capital_data.py @@ -11,7 +11,6 @@ class csvCapitalData(capitalData): def __init__(self, datapath=arg_not_supplied, log=get_logger("csvCapitalData")): - super().__init__(log=log) if datapath is None: diff --git a/sysdata/csv/csv_contract_position_data.py b/sysdata/csv/csv_contract_position_data.py index 1ad090dbf1..5d1c91a6f7 100644 --- a/sysdata/csv/csv_contract_position_data.py +++ b/sysdata/csv/csv_contract_position_data.py @@ -17,7 +17,6 @@ class csvContractPositionData(contractPositionData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvContractPositionData") ): - super().__init__(log=log) if datapath is None: diff --git a/sysdata/csv/csv_futures_contract_prices.py b/sysdata/csv/csv_futures_contract_prices.py index 2d48a056e1..7dcb705dcc 100644 --- a/sysdata/csv/csv_futures_contract_prices.py +++ b/sysdata/csv/csv_futures_contract_prices.py @@ -36,7 +36,6 @@ def __init__( log=get_logger("csvFuturesContractPriceData"), config: ConfigCsvFuturesPrices = arg_not_supplied, ): - super().__init__(log=log) if datapath is arg_not_supplied: raise Exception("Need to pass datapath") @@ -66,7 +65,6 @@ def _get_merged_prices_for_contract_object_no_checking( def _get_prices_at_frequency_for_contract_object_no_checking( self, futures_contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - keyname = self._keyname_given_contract_object_and_freq( futures_contract_object, frequency=frequency ) @@ -131,7 +129,6 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - keyname = self._keyname_given_contract_object_and_freq( futures_contract_object, frequency=frequency ) @@ -183,7 +180,6 @@ def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: def get_contracts_with_price_data_for_frequency( self, frequency: Frequency ) -> listOfFuturesContracts: - list_of_contract_and_freq_tuples = ( self._get_contract_freq_tuples_with_price_data() ) diff --git a/sysdata/csv/csv_futures_contracts.py b/sysdata/csv/csv_futures_contracts.py index 81d0196d0a..1bdb0608b0 100644 --- a/sysdata/csv/csv_futures_contracts.py +++ b/sysdata/csv/csv_futures_contracts.py @@ -16,7 +16,6 @@ class csvFuturesContractData(futuresContractData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvFuturesContractData") ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -75,5 +74,4 @@ def _add_contract_object_without_checking_for_existing_entry(self, contract_obje def _get_contract_data_without_checking( self, instrument_code: str, contract_date: str ): - raise NotImplementedError("used for backup only no read methods") diff --git a/sysdata/csv/csv_historic_orders.py b/sysdata/csv/csv_historic_orders.py index cbc0045500..5ff31aaeba 100644 --- a/sysdata/csv/csv_historic_orders.py +++ b/sysdata/csv/csv_historic_orders.py @@ -30,7 +30,6 @@ class csvStrategyHistoricOrdersData(strategyHistoricOrdersData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvStrategyPositionData") ): - super().__init__(log=log) if datapath is None: @@ -50,7 +49,6 @@ class csvContractHistoricOrdersData(contractHistoricOrdersData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvContractPositionData") ): - super().__init__(log=log) if datapath is None: @@ -70,7 +68,6 @@ class csvBrokerHistoricOrdersData(contractHistoricOrdersData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvBrokerHistoricOrdersData") ): - super().__init__(log=log) if datapath is None: diff --git a/sysdata/csv/csv_instrument_data.py b/sysdata/csv/csv_instrument_data.py index b6195844b3..6f68d2baed 100644 --- a/sysdata/csv/csv_instrument_data.py +++ b/sysdata/csv/csv_instrument_data.py @@ -27,7 +27,6 @@ def __init__( datapath=arg_not_supplied, log=get_logger("csvFuturesInstrumentData"), ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -79,7 +78,6 @@ def get_all_instrument_data_as_df(self) -> pd.DataFrame: return config_data def _instrument_csv_as_df(self) -> pd.DataFrame: - config_data = getattr(self, "_instrument_df", None) if config_data is None: config_data = self._load_and_store_instrument_csv_as_df() diff --git a/sysdata/csv/csv_multiple_prices.py b/sysdata/csv/csv_multiple_prices.py index 8f440aba16..6a6a3430f4 100644 --- a/sysdata/csv/csv_multiple_prices.py +++ b/sysdata/csv/csv_multiple_prices.py @@ -29,7 +29,6 @@ def __init__( datapath: str = arg_not_supplied, log=get_logger("csvFuturesMultiplePricesData"), ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -50,7 +49,6 @@ def get_list_of_instruments(self): def _get_multiple_prices_without_checking( self, instrument_code: str ) -> futuresMultiplePrices: - instr_all_price_data = self._read_instrument_prices(instrument_code) for contract_col_name in list_of_contract_column_names: instr_all_price_data[contract_col_name] = instr_all_price_data[ @@ -69,7 +67,6 @@ def _delete_multiple_prices_without_any_warning_be_careful( def _add_multiple_prices_without_checking_for_existing_entry( self, instrument_code: str, multiple_price_data: futuresMultiplePrices ): - filename = self._filename_given_instrument_code(instrument_code) multiple_price_data.to_csv(filename, index_label=DATE_INDEX_NAME) diff --git a/sysdata/csv/csv_optimal_position.py b/sysdata/csv/csv_optimal_position.py index 34503bafd3..af6b08c661 100644 --- a/sysdata/csv/csv_optimal_position.py +++ b/sysdata/csv/csv_optimal_position.py @@ -17,7 +17,6 @@ class csvOptimalPositionData(optimalPositionData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvOptimalPositionData") ): - super().__init__(log=log) if datapath is None: diff --git a/sysdata/csv/csv_roll_calendars.py b/sysdata/csv/csv_roll_calendars.py index 770279da00..9adf974466 100644 --- a/sysdata/csv/csv_roll_calendars.py +++ b/sysdata/csv/csv_roll_calendars.py @@ -24,7 +24,6 @@ class csvRollCalendarData(rollCalendarData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvRollCalendarData") ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -45,7 +44,6 @@ def get_list_of_instruments(self) -> list: def _get_roll_calendar_without_checking(self, instrument_code: str) -> rollCalendar: filename = self._filename_given_instrument_code(instrument_code) try: - roll_calendar = pd_readcsv(filename, date_index_name=DATE_INDEX_NAME) except OSError: self.log.warning("Can't find roll calendar file %s" % filename) diff --git a/sysdata/csv/csv_roll_parameters.py b/sysdata/csv/csv_roll_parameters.py index 52e6492a2e..e8dfcc542e 100644 --- a/sysdata/csv/csv_roll_parameters.py +++ b/sysdata/csv/csv_roll_parameters.py @@ -66,7 +66,6 @@ class csvRollParametersData(rollParametersData): def __init__( self, log=get_logger("csvRollParametersData"), datapath=arg_not_supplied ): - super().__init__(log=log) if datapath is arg_not_supplied: datapath = ROLLS_DATAPATH @@ -84,7 +83,6 @@ def get_list_of_instruments(self) -> list: def _get_roll_parameters_without_checking( self, instrument_code: str ) -> rollParameters: - all_parameters = self.get_roll_parameters_all_instruments() return all_parameters.get_roll_parameters_for_instrument(instrument_code) diff --git a/sysdata/csv/csv_roll_state_storage.py b/sysdata/csv/csv_roll_state_storage.py index 2750853180..59adae5b2d 100644 --- a/sysdata/csv/csv_roll_state_storage.py +++ b/sysdata/csv/csv_roll_state_storage.py @@ -11,7 +11,6 @@ class csvRollStateData(rollStateData): """ def __init__(self, datapath=arg_not_supplied, log=get_logger("csvRollStateData")): - super().__init__(log=log) if datapath is arg_not_supplied: diff --git a/sysdata/csv/csv_spread_costs.py b/sysdata/csv/csv_spread_costs.py index 2783172262..de2d5f0ef0 100644 --- a/sysdata/csv/csv_spread_costs.py +++ b/sysdata/csv/csv_spread_costs.py @@ -25,7 +25,6 @@ def __init__( datapath=arg_not_supplied, log=get_logger("csvSpreadCostData"), ): - super().__init__(log=log) if datapath is arg_not_supplied: diff --git a/sysdata/csv/csv_spreads.py b/sysdata/csv/csv_spreads.py index d6465ebd1f..d3b6b579fb 100644 --- a/sysdata/csv/csv_spreads.py +++ b/sysdata/csv/csv_spreads.py @@ -24,7 +24,6 @@ class csvSpreadsForInstrumentData(spreadsForInstrumentData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvSpreadsForInstrumentData") ): - super().__init__(log=log) if datapath is arg_not_supplied: @@ -66,7 +65,6 @@ def _delete_spreads_without_any_warning_be_careful(self, instrument_code: str): def _add_spreads_without_checking_for_existing_entry( self, instrument_code: str, spreads: spreadsForInstrument ): - # Ensures the file will be written with a column header spreads_as_dataframe = pd.DataFrame(spreads) spreads_as_dataframe.columns = [SPREAD_COLUMN_NAME] diff --git a/sysdata/csv/csv_strategy_position_data.py b/sysdata/csv/csv_strategy_position_data.py index 224d3ac620..773af5fc3c 100644 --- a/sysdata/csv/csv_strategy_position_data.py +++ b/sysdata/csv/csv_strategy_position_data.py @@ -17,7 +17,6 @@ class csvStrategyPositionData(strategyPositionData): def __init__( self, datapath=arg_not_supplied, log=get_logger("csvStrategyPositionData") ): - super().__init__(log=log) if datapath is None: diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index f2e6e70509..8664cb8b63 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -11,6 +11,7 @@ from sysdata.mongodb.mongo_IB_client_id import mongoIbBrokerClientIdData from sysdata.parquet.parquet_access import ParquetAccess + class dataBlob(object): def __init__( self, @@ -89,7 +90,9 @@ def add_class_object(self, class_object, use_prefix: str = arg_not_supplied): new_name = self._get_new_name(class_name, use_prefix=use_prefix) if not self._already_existing_class_name(new_name): resolved_instance = self._get_resolved_instance_of_class(class_object) - self._add_new_class_with_new_name(resolved_instance=resolved_instance, attr_name=new_name) + self._add_new_class_with_new_name( + resolved_instance=resolved_instance, attr_name=new_name + ) def _get_resolved_instance_of_class(self, class_object): class_adding_method = self._get_class_adding_method(class_object) @@ -104,7 +107,7 @@ def _get_class_adding_method(self, class_object): csv=self._add_csv_class, arctic=self._add_arctic_class, mongo=self._add_mongo_class, - parquet = self._add_parquet_class + parquet=self._add_parquet_class, ) method_to_add_with = class_dict.get(prefix, None) @@ -173,7 +176,9 @@ def _add_arctic_class(self, class_object): def _add_parquet_class(self, class_object): log = self._get_specific_logger(class_object) try: - resolved_instance = class_object(parquet_access = self.parquet_access, log=log) + resolved_instance = class_object( + parquet_access=self.parquet_access, log=log + ) except Exception as e: class_name = get_class_name(class_object) msg = ( @@ -239,8 +244,9 @@ def _resolve_names_and_add(self, resolved_instance, new_name: str): def _get_new_name(self, class_name: str, use_prefix: str = arg_not_supplied) -> str: split_up_name = camel_case_split(class_name) attr_name = identifying_name( - split_up_name, keep_original_prefix=self._keep_original_prefix, - use_prefix=use_prefix + split_up_name, + keep_original_prefix=self._keep_original_prefix, + use_prefix=use_prefix, ) return attr_name @@ -376,14 +382,19 @@ def log_name(self) -> str: return log_name -source_dict = dict(arctic="db", mongo="db", csv="db", parquet="db",ib="broker") +source_dict = dict(arctic="db", mongo="db", csv="db", parquet="db", ib="broker") + def get_parquet_root_directory(config): path = config.get_element("parquet_store") return get_resolved_pathname(path) -def identifying_name(split_up_name: list, keep_original_prefix: bool=False, use_prefix: str = arg_not_supplied) -> str: +def identifying_name( + split_up_name: list, + keep_original_prefix: bool = False, + use_prefix: str = arg_not_supplied, +) -> str: """ Turns sourceClassNameData into broker_class_name or db_class_name diff --git a/sysdata/futures/contracts.py b/sysdata/futures/contracts.py index 8cebc8ccd7..df71aaae5f 100644 --- a/sysdata/futures/contracts.py +++ b/sysdata/futures/contracts.py @@ -20,7 +20,6 @@ class futuresContractData(baseData): """ def __init__(self, log=get_logger("futuresInstrumentData")): - super().__init__(log=log) def __repr__(self): @@ -45,7 +44,6 @@ def get_contract_object( def delete_contract_data( self, instrument_code: str, contract_date: str, are_you_sure=False ): - log = self.log.setup( instrument_code=instrument_code, contract_date=contract_date ) @@ -78,7 +76,6 @@ def delete_all_contracts_for_instrument( def add_contract_data( self, contract_object: futuresContract, ignore_duplication: bool = False ): - instrument_code = contract_object.instrument_code contract_date = contract_object.date_str diff --git a/sysdata/futures/futures_per_contract_prices.py b/sysdata/futures/futures_per_contract_prices.py index 828d94c5ad..cca1016ad9 100644 --- a/sysdata/futures/futures_per_contract_prices.py +++ b/sysdata/futures/futures_per_contract_prices.py @@ -95,7 +95,6 @@ def has_merged_price_data_for_contract( def has_price_data_for_contract_at_frequency( self, contract_object: futuresContract, frequency: Frequency ) -> bool: - list_of_contracts = self.get_contracts_with_price_data_for_frequency( frequency=frequency ) @@ -351,7 +350,6 @@ def update_prices_at_frequency_for_contract( check_for_spike: bool = True, max_price_spike: float = VERY_BIG_NUMBER, ) -> int: - new_log = contract_object.log(self.log) if len(new_futures_per_contract_prices) == 0: @@ -496,13 +494,11 @@ def delete_prices_at_frequency_for_instrument_code( ) def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: - raise NotImplementedError(BASE_CLASS_ERROR) def get_contracts_with_price_data_for_frequency( self, frequency: Frequency ) -> listOfFuturesContracts: - raise NotImplementedError(BASE_CLASS_ERROR) def _delete_merged_prices_for_contract_object_with_no_checks_be_careful( @@ -520,7 +516,6 @@ def _write_merged_prices_for_contract_object_no_checking( futures_contract_object: futuresContract, futures_price_data: futuresContractPrices, ): - raise NotImplementedError(BASE_CLASS_ERROR) def _write_prices_at_frequency_for_contract_object_no_checking( @@ -529,17 +524,14 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - raise NotImplementedError(BASE_CLASS_ERROR) def _get_merged_prices_for_contract_object_no_checking( self, contract_object: futuresContract ) -> futuresContractPrices: - raise NotImplementedError(BASE_CLASS_ERROR) def _get_prices_at_frequency_for_contract_object_no_checking( self, futures_contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - raise NotImplementedError(BASE_CLASS_ERROR) diff --git a/sysdata/futures/roll_calendars.py b/sysdata/futures/roll_calendars.py index 064d9adec4..b33db916a4 100644 --- a/sysdata/futures/roll_calendars.py +++ b/sysdata/futures/roll_calendars.py @@ -59,7 +59,6 @@ def add_roll_calendar( roll_calendar: rollCalendar, ignore_duplication: bool = False, ): - self.log.debug("Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): diff --git a/sysdata/futures/rolls_parameters.py b/sysdata/futures/rolls_parameters.py index f612982358..1dd269c46e 100644 --- a/sysdata/futures/rolls_parameters.py +++ b/sysdata/futures/rolls_parameters.py @@ -66,7 +66,6 @@ def add_roll_parameters( roll_parameters: rollParameters, ignore_duplication: bool = False, ): - self.log.debug("Updating log attributes", instrument_code=instrument_code) if self.is_code_in_data(instrument_code): diff --git a/sysdata/mongodb/mongo_IB_client_id.py b/sysdata/mongodb/mongo_IB_client_id.py index 3a631cac5a..3803fffd5c 100644 --- a/sysdata/mongodb/mongo_IB_client_id.py +++ b/sysdata/mongodb/mongo_IB_client_id.py @@ -18,7 +18,6 @@ def __init__( idoffset=arg_not_supplied, log=get_logger("mongoIDTracker"), ): - super().__init__(log=log, idoffset=idoffset) self._mongo_data = mongoDataWithSingleKey( IB_CLIENT_COLLECTION, IB_ID_REF, mongo_db diff --git a/sysdata/mongodb/mongo_connection.py b/sysdata/mongodb/mongo_connection.py index ea004f6179..b97e6cef09 100644 --- a/sysdata/mongodb/mongo_connection.py +++ b/sysdata/mongodb/mongo_connection.py @@ -31,14 +31,12 @@ def mongo_defaults(**kwargs): production_config = get_production_config() output_dict = {} for param_name in LIST_OF_MONGO_PARAMS: - if param_name in passed_param_names: param_value = kwargs[param_name] else: param_value = arg_not_supplied if param_value is arg_not_supplied: - param_value = getattr(production_config, param_name) output_dict[param_name] = param_value @@ -89,7 +87,6 @@ def __init__( mongo_host: str = arg_not_supplied, mongo_port: int = arg_not_supplied, ): - database_name, host, port = mongo_defaults( mongo_database_name=mongo_database_name, mongo_host=mongo_host, @@ -121,7 +118,6 @@ class mongoConnection(object): """ def __init__(self, collection_name: str, mongo_db: mongoDb = arg_not_supplied): - # FIXME REMOVE NONE WHEN CODE PROPERLY REFACTORED if mongo_db is arg_not_supplied or mongo_db is None: mongo_db = mongoDb() @@ -152,7 +148,6 @@ def __repr__(self): ) def get_indexes(self): - raw_index_information = copy(self.collection.index_information()) if len(raw_index_information) == 0: diff --git a/sysdata/mongodb/mongo_futures_contracts.py b/sysdata/mongodb/mongo_futures_contracts.py index 34845b573c..6353658d93 100644 --- a/sysdata/mongodb/mongo_futures_contracts.py +++ b/sysdata/mongodb/mongo_futures_contracts.py @@ -24,7 +24,6 @@ class mongoFuturesContractData(futuresContractData): def __init__( self, mongo_db=arg_not_supplied, log=get_logger("mongoFuturesContractData") ): - super().__init__(log=log) mongo_data = mongoDataWithSingleKey( CONTRACT_COLLECTION, "contract_key", mongo_db=mongo_db @@ -60,7 +59,6 @@ def get_list_of_all_instruments_with_contracts(self) -> list: def get_all_contract_objects_for_instrument_code( self, instrument_code: str ) -> listOfFuturesContracts: - list_of_keys = self._get_all_contract_keys_for_instrument_code(instrument_code) list_of_objects = [ self._get_contract_data_from_key_without_checking(key) @@ -94,14 +92,12 @@ def get_list_of_contract_dates_for_instrument_code( def _get_contract_data_without_checking( self, instrument_code: str, contract_id: str ) -> futuresContract: - key = contract_key_from_code_and_id(instrument_code, contract_id) contract_object = self._get_contract_data_from_key_without_checking(key) return contract_object def _get_contract_data_from_key_without_checking(self, key: str) -> futuresContract: - result_dict = self.mongo_data.get_result_dict_for_key_without_key_value(key) contract_object = futuresContract.create_from_dict(result_dict) @@ -111,7 +107,6 @@ def _get_contract_data_from_key_without_checking(self, key: str) -> futuresContr def _delete_contract_data_without_any_warning_be_careful( self, instrument_code: str, contract_date: str ): - key = contract_key_from_code_and_id(instrument_code, contract_date) self.mongo_data.delete_data_without_any_warning(key) diff --git a/sysdata/mongodb/mongo_generic.py b/sysdata/mongodb/mongo_generic.py index 4bd0be5e55..f1ed33785b 100644 --- a/sysdata/mongodb/mongo_generic.py +++ b/sysdata/mongodb/mongo_generic.py @@ -121,7 +121,6 @@ def delete_data_without_any_warning(self, key): self.collection.remove({key_name: key}) def delete_data_with_any_warning_for_custom_dict(self, custom_dict: dict): - self.collection.remove(custom_dict) def add_data(self, key, data_dict: dict, allow_overwrite=False, clean_ints=True): @@ -149,7 +148,6 @@ def add_data(self, key, data_dict: dict, allow_overwrite=False, clean_ints=True) ) def _update_existing_data_with_cleaned_dict(self, key, cleaned_data_dict): - key_name = self.key_name self.collection.update_one({key_name: key}, {"$set": cleaned_data_dict}) @@ -267,7 +265,6 @@ def add_data( def _update_existing_data_with_cleaned_dict( self, dict_of_keys: dict, cleaned_data_dict: dict ): - self._mongo.collection.update_one(dict_of_keys, {"$set": cleaned_data_dict}) def _add_new_cleaned_dict(self, dict_of_keys: dict, cleaned_data_dict: dict): @@ -278,7 +275,6 @@ def _add_new_cleaned_dict(self, dict_of_keys: dict, cleaned_data_dict: dict): self._mongo.collection.insert_one(dict_with_both_keys_and_data) def delete_data_without_any_warning(self, dict_of_keys): - self._mongo.collection.remove(dict_of_keys) diff --git a/sysdata/mongodb/mongo_historic_orders.py b/sysdata/mongodb/mongo_historic_orders.py index 95109476ea..8c823ac033 100644 --- a/sysdata/mongodb/mongo_historic_orders.py +++ b/sysdata/mongodb/mongo_historic_orders.py @@ -109,7 +109,6 @@ def get_list_of_order_ids_in_date_range( period_start: datetime.datetime, period_end: datetime.datetime = arg_not_supplied, ) -> list: - if period_end is arg_not_supplied: period_end = datetime.datetime.now() @@ -138,7 +137,6 @@ def _name(self): def get_list_of_order_ids_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> list: - old_list_of_order_id = ( self._get_list_of_order_ids_for_instrument_strategy_specify_key( instrument_strategy, "old_key" @@ -155,7 +153,6 @@ def get_list_of_order_ids_for_instrument_strategy( def _get_list_of_order_ids_for_instrument_strategy_specify_key( self, instrument_strategy: instrumentStrategy, keyfield: str ) -> list: - object_key = getattr(instrument_strategy, keyfield) custom_dict = dict(key=object_key) list_of_result_dicts = self.mongo_data.get_list_of_result_dict_for_custom_dict( diff --git a/sysdata/mongodb/mongo_lock_data.py b/sysdata/mongodb/mongo_lock_data.py index 9906a41de2..bea2d36f95 100644 --- a/sysdata/mongodb/mongo_lock_data.py +++ b/sysdata/mongodb/mongo_lock_data.py @@ -15,7 +15,6 @@ class mongoLockData(lockData): """ def __init__(self, mongo_db=arg_not_supplied, log=get_logger("mongoLockData")): - super().__init__(log=log) self._mongo_data = mongoDataWithSingleKey( LOCK_STATUS_COLLECTION, "instrument_code", mongo_db=mongo_db diff --git a/sysdata/mongodb/mongo_margin.py b/sysdata/mongodb/mongo_margin.py index fbe1095492..a81cd07ebd 100644 --- a/sysdata/mongodb/mongo_margin.py +++ b/sysdata/mongodb/mongo_margin.py @@ -21,7 +21,6 @@ def __init__( mongo_db=arg_not_supplied, log=get_logger("mongoMarginData"), ): - self._mongo_data = mongoDataWithSingleKey( MARGIN_COLLECTION, STRATEGY_REF, mongo_db ) diff --git a/sysdata/mongodb/mongo_override.py b/sysdata/mongodb/mongo_override.py index 419e35a516..f252af5afc 100644 --- a/sysdata/mongodb/mongo_override.py +++ b/sysdata/mongodb/mongo_override.py @@ -18,7 +18,6 @@ class mongoOverrideData(overrideData): """ def __init__(self, mongo_db=None, log=get_logger("mongoOverrideData")): - super().__init__(log=log) self._mongo_data = mongoDataWithMultipleKeys( OVERRIDE_STATUS_COLLECTION, mongo_db=mongo_db diff --git a/sysdata/mongodb/mongo_position_limits.py b/sysdata/mongodb/mongo_position_limits.py index 3ff841d112..da4356f0a0 100644 --- a/sysdata/mongodb/mongo_position_limits.py +++ b/sysdata/mongodb/mongo_position_limits.py @@ -53,7 +53,6 @@ def get_all_instruments_with_limits(self) -> list: return list_of_instruments def get_all_instrument_strategies_with_limits(self) -> listOfInstrumentStrategies: - dict_of_keys = {MARKER_KEY: MARKER_STRATEGY_INSTRUMENT} list_of_dicts = self.mongo_data.get_list_of_result_dicts_for_dict_keys( dict_of_keys @@ -92,7 +91,6 @@ def delete_position_limit_for_instrument(self, instrument_code: str): def _get_abs_position_limit_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> int: - dict_of_keys = { MARKER_KEY: MARKER_STRATEGY_INSTRUMENT, STRATEGY_KEY: instrument_strategy.strategy_name, diff --git a/sysdata/mongodb/mongo_process_control.py b/sysdata/mongodb/mongo_process_control.py index 8eeb7a4bbe..85a4f60811 100644 --- a/sysdata/mongodb/mongo_process_control.py +++ b/sysdata/mongodb/mongo_process_control.py @@ -19,7 +19,6 @@ class mongoControlProcessData(controlProcessData): def __init__( self, mongo_db=arg_not_supplied, log=get_logger("mongoControlProcessData") ): - super().__init__(log=log) self._mongo_data = mongoDataWithSingleKey( diff --git a/sysdata/mongodb/mongo_roll_state_storage.py b/sysdata/mongodb/mongo_roll_state_storage.py index 169b67813f..be68d9c02e 100644 --- a/sysdata/mongodb/mongo_roll_state_storage.py +++ b/sysdata/mongodb/mongo_roll_state_storage.py @@ -15,7 +15,6 @@ class mongoRollStateData(rollStateData): """ def __init__(self, mongo_db=None, log=get_logger("mongoRollStateData")): - super().__init__(log=log) self._mongo_data = mongoDataWithSingleKey( diff --git a/sysdata/mongodb/mongo_temporary_close.py b/sysdata/mongodb/mongo_temporary_close.py index 008f670a8b..0b38def4b7 100644 --- a/sysdata/mongodb/mongo_temporary_close.py +++ b/sysdata/mongodb/mongo_temporary_close.py @@ -14,7 +14,6 @@ class mongoTemporaryCloseData(temporaryCloseData): def __init__( self, mongo_db=arg_not_supplied, log=get_logger("mongotemporaryCloseData") ): - super().__init__(log=log) self._mongo_data = mongoDataWithSingleKey( TEMPORARY_CLOSE_COLLECTION, "instrument_code", mongo_db=mongo_db diff --git a/sysdata/mongodb/mongo_temporary_override.py b/sysdata/mongodb/mongo_temporary_override.py index db74a79738..9b4b26916c 100644 --- a/sysdata/mongodb/mongo_temporary_override.py +++ b/sysdata/mongodb/mongo_temporary_override.py @@ -14,7 +14,6 @@ class mongoTemporaryOverrideData(temporaryOverrideData): def __init__( self, mongo_db=arg_not_supplied, log=get_logger("mongoTemporaryOverrideData") ): - super().__init__(log=log) self._mongo_data = mongoDataWithSingleKey( TEMPORARY_OVERRIDE_COLLECTION, KEY, mongo_db=mongo_db diff --git a/sysdata/mongodb/mongo_trade_limits.py b/sysdata/mongodb/mongo_trade_limits.py index 5801ebf1a0..ed5100f99a 100644 --- a/sysdata/mongodb/mongo_trade_limits.py +++ b/sysdata/mongodb/mongo_trade_limits.py @@ -42,7 +42,6 @@ def __repr__(self): def _get_trade_limit_as_dict_or_missing_data( self, instrument_strategy: instrumentStrategy, period_days: int ) -> dict: - instrument_strategy_key = instrument_strategy.key dict_of_keys = { INSTRUMENT_STRATEGY_KEY: instrument_strategy_key, @@ -60,7 +59,6 @@ def _get_trade_limit_as_dict_or_missing_data( def _get_old_style_trade_limit_as_dict_or_missing_data( self, instrument_strategy: instrumentStrategy, period_days: int ) -> dict: - dict_of_keys = { LEGACY_INSTRUMENT_KEY: instrument_strategy.instrument_code, LEGACY_STRATEGY_KEY: instrument_strategy.strategy_name, @@ -95,7 +93,6 @@ def _delete_old_style_data(self, instrument_strategy_key: str, period_days: int) self.mongo_data.delete_data_without_any_warning(dict_of_keys) def _get_all_limit_keys(self) -> listOfInstrumentStrategyKeyAndDays: - list_of_result_dicts = self.mongo_data.get_list_of_all_dicts() list_of_results = [ diff --git a/sysdata/mongodb/tests/test_mongodb.py b/sysdata/mongodb/tests/test_mongodb.py index 3014d03ad8..c520c2011e 100644 --- a/sysdata/mongodb/tests/test_mongodb.py +++ b/sysdata/mongodb/tests/test_mongodb.py @@ -3,7 +3,6 @@ class TestMongoDB: def test_hide_password(self): - # url examples from https://docs.mongodb.com/manual/reference/connection-string/ ip = "mongodb://127.0.0.1/production" diff --git a/sysdata/parquet/parquet_access.py b/sysdata/parquet/parquet_access.py index d69576a5ae..18eb498cf7 100644 --- a/sysdata/parquet/parquet_access.py +++ b/sysdata/parquet/parquet_access.py @@ -1,37 +1,65 @@ import os import pandas as pd -from syscore.fileutils import files_with_extension_in_pathname, resolve_path_and_filename_for_package, get_resolved_pathname, delete_file_if_too_old +from syscore.fileutils import ( + files_with_extension_in_pathname, + resolve_path_and_filename_for_package, + get_resolved_pathname, + delete_file_if_too_old, +) from pathlib import Path EXTENSION = "parquet" + + class ParquetAccess(object): def __init__(self, parquet_store_path: str): self.parquet_store = get_resolved_pathname(parquet_store_path) def get_all_identifiers_with_data_type(self, data_type: str): - path= self._get_pathname_given_data_type(data_type) + path = self._get_pathname_given_data_type(data_type) return files_with_extension_in_pathname(path, extension=EXTENSION) - def does_idenitifier_with_data_type_exist(self, data_type: str, identifier: str) -> bool: - filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + def does_idenitifier_with_data_type_exist( + self, data_type: str, identifier: str + ) -> bool: + filename = self._get_filename_given_data_type_and_identifier( + data_type=data_type, identifier=identifier + ) return os.path.isfile(filename) - def delete_data_given_data_type_and_identifier(self, data_type: str, identifier: str): - filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + def delete_data_given_data_type_and_identifier( + self, data_type: str, identifier: str + ): + filename = self._get_filename_given_data_type_and_identifier( + data_type=data_type, identifier=identifier + ) os.remove(filename) - def write_data_given_data_type_and_identifier(self, data_to_write: pd.DataFrame, data_type: str, identifier: str): - filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) - data_to_write.to_parquet(filename, coerce_timestamps='us',allow_truncated_timestamps=True) + def write_data_given_data_type_and_identifier( + self, data_to_write: pd.DataFrame, data_type: str, identifier: str + ): + filename = self._get_filename_given_data_type_and_identifier( + data_type=data_type, identifier=identifier + ) + data_to_write.to_parquet( + filename, coerce_timestamps="us", allow_truncated_timestamps=True + ) - def read_data_given_data_type_and_identifier(self, data_type: str, identifier: str) -> pd.DataFrame: - filename = self._get_filename_given_data_type_and_identifier(data_type=data_type, identifier=identifier) + def read_data_given_data_type_and_identifier( + self, data_type: str, identifier: str + ) -> pd.DataFrame: + filename = self._get_filename_given_data_type_and_identifier( + data_type=data_type, identifier=identifier + ) return pd.read_parquet(filename) - - def _get_filename_given_data_type_and_identifier(self, data_type: str, identifier: str): + def _get_filename_given_data_type_and_identifier( + self, data_type: str, identifier: str + ): path = self._get_pathname_given_data_type(data_type) - return resolve_path_and_filename_for_package(path, seperate_filename="%s.%s" % (identifier, EXTENSION)) + return resolve_path_and_filename_for_package( + path, seperate_filename="%s.%s" % (identifier, EXTENSION) + ) def _get_pathname_given_data_type(self, data_type: str): root = self.parquet_store @@ -39,4 +67,3 @@ def _get_pathname_given_data_type(self, data_type: str): Path(path).mkdir(parents=True, exist_ok=True) return path - diff --git a/sysdata/parquet/parquet_adjusted_prices.py b/sysdata/parquet/parquet_adjusted_prices.py index 56b5a51e6f..1ce6286a88 100644 --- a/sysdata/parquet/parquet_adjusted_prices.py +++ b/sysdata/parquet/parquet_adjusted_prices.py @@ -15,8 +15,11 @@ class parquetFuturesAdjustedPricesData(futuresAdjustedPricesData): Class to read / write multiple futures price data to and from arctic """ - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFuturesAdjustedPrices")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetFuturesAdjustedPrices"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -28,18 +31,24 @@ def parquet(self) -> ParquetAccess: return self._parquet def get_list_of_instruments(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=ADJPRICE_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=ADJPRICE_COLLECTION + ) def _get_adjusted_prices_without_checking( self, instrument_code: str ) -> futuresAdjustedPrices: - raw_price_df = self.parquet.read_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + raw_price_df = self.parquet.read_data_given_data_type_and_identifier( + data_type=ADJPRICE_COLLECTION, identifier=instrument_code + ) return futuresAdjustedPrices(raw_price_df.squeeze()) def _delete_adjusted_prices_without_any_warning_be_careful( self, instrument_code: str ): - self.parquet.delete_data_given_data_type_and_identifier(data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=ADJPRICE_COLLECTION, identifier=instrument_code + ) self.log.debug( "Deleted adjusted prices for %s from %s" % (instrument_code, str(self)), instrument_code=instrument_code, @@ -52,7 +61,11 @@ def _add_adjusted_prices_without_checking_for_existing_entry( adjusted_price_data_aspd.columns = ["price"] adjusted_price_data_aspd = adjusted_price_data_aspd.astype(float) - self.parquet.write_data_given_data_type_and_identifier(data_to_write=adjusted_price_data_aspd, data_type=ADJPRICE_COLLECTION, identifier=instrument_code) + self.parquet.write_data_given_data_type_and_identifier( + data_to_write=adjusted_price_data_aspd, + data_type=ADJPRICE_COLLECTION, + identifier=instrument_code, + ) self.log.debug( "Wrote %s lines of prices for %s to %s" diff --git a/sysdata/parquet/parquet_capital.py b/sysdata/parquet/parquet_capital.py index 1e9a794c93..26b1d9fa67 100644 --- a/sysdata/parquet/parquet_capital.py +++ b/sysdata/parquet/parquet_capital.py @@ -13,8 +13,9 @@ class parquetCapitalData(capitalData): Class to read / write multiple total capital data to and from arctic """ - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetCapitalData")): - + def __init__( + self, parquet_access: ParquetAccess, log=get_logger("parquetCapitalData") + ): super().__init__(log=log) self._parquet = parquet_access @@ -22,31 +23,41 @@ def __repr__(self): return "parquetCapitalData" @property - def parquet(self)-> ParquetAccess: + def parquet(self) -> ParquetAccess: return self._parquet def _get_list_of_strategies_with_capital_including_total(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=CAPITAL_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=CAPITAL_COLLECTION + ) def get_capital_pd_df_for_strategy(self, strategy_name: str) -> pd.DataFrame: try: - pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=CAPITAL_COLLECTION, identifier=strategy_name) + pd_df = self.parquet.read_data_given_data_type_and_identifier( + data_type=CAPITAL_COLLECTION, identifier=strategy_name + ) except: raise missingData( - "Unable to get capital data from parquet for strategy %s" % strategy_name + "Unable to get capital data from parquet for strategy %s" + % strategy_name ) return pd_df def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): - - self.parquet.delete_data_given_data_type_and_identifier(data_type=CAPITAL_COLLECTION, identifier=strategy_name) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=CAPITAL_COLLECTION, identifier=strategy_name + ) def update_capital_pd_df_for_strategy( self, strategy_name: str, updated_capital_df: pd.DataFrame ): - if len(updated_capital_df.columns)==1: + if len(updated_capital_df.columns) == 1: ## single strategy, need columns labelling - updated_capital_df.columns = ['capital'] + updated_capital_df.columns = ["capital"] - self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_capital_df, identifier=strategy_name, data_type=CAPITAL_COLLECTION) + self.parquet.write_data_given_data_type_and_identifier( + data_to_write=updated_capital_df, + identifier=strategy_name, + data_type=CAPITAL_COLLECTION, + ) diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py index 0ed4115e42..580574c226 100644 --- a/sysdata/parquet/parquet_futures_per_contract_prices.py +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -21,8 +21,11 @@ class parquetFuturesContractPriceData(futuresContractPriceData): Class to read / write futures price data to and from arctic """ - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFuturesContractPriceData")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetFuturesContractPriceData"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -36,7 +39,6 @@ def parquet(self) -> ParquetAccess: def _get_merged_prices_for_contract_object_no_checking( self, futures_contract_object: futuresContract ) -> futuresContractPrices: - # Returns a data frame which should have the right format data = self._get_prices_at_frequency_for_contract_object_no_checking( futures_contract_object, frequency=MIXED_FREQ @@ -47,13 +49,14 @@ def _get_merged_prices_for_contract_object_no_checking( def _get_prices_at_frequency_for_contract_object_no_checking( self, futures_contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) # Returns a data frame which should have the right format - data = self.parquet.read_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident) + data = self.parquet.read_data_given_data_type_and_identifier( + data_type=CONTRACT_COLLECTION, identifier=ident + ) return futuresContractPrices(data) @@ -83,14 +86,17 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - log = futures_contract_object.log(self.log) ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) futures_price_data_as_pd = pd.DataFrame(futures_price_data) - self.parquet.write_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident, data_to_write=futures_price_data_as_pd) + self.parquet.write_data_given_data_type_and_identifier( + data_type=CONTRACT_COLLECTION, + identifier=ident, + data_to_write=futures_price_data_as_pd, + ) log.debug( "Wrote %s lines of prices for %s at %s to %s" @@ -117,7 +123,6 @@ def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: def get_contracts_with_price_data_for_frequency( self, frequency: Frequency ) -> listOfFuturesContracts: - list_of_contract_and_freq_tuples = ( self._get_contract_and_frequencies_with_price_data() ) @@ -141,8 +146,10 @@ def has_merged_price_data_for_contract( def has_price_data_for_contract_at_frequency( self, contract_object: futuresContract, frequency: Frequency ) -> bool: - ident =from_contract_and_freq_to_key(contract_object, frequency=frequency) - return self.parquet.does_idenitifier_with_data_type_exist(data_type=CONTRACT_COLLECTION, identifier=ident) + ident = from_contract_and_freq_to_key(contract_object, frequency=frequency) + return self.parquet.does_idenitifier_with_data_type_exist( + data_type=CONTRACT_COLLECTION, identifier=ident + ) def _get_contract_and_frequencies_with_price_data(self) -> list: """ @@ -158,7 +165,9 @@ def _get_contract_and_frequencies_with_price_data(self) -> list: return list_of_contract_and_freq_tuples def _all_keynames_in_library(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=CONTRACT_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=CONTRACT_COLLECTION + ) def _delete_merged_prices_for_contract_object_with_no_checks_be_careful( self, futures_contract_object: futuresContract @@ -183,7 +192,9 @@ def _delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( ident = from_contract_and_freq_to_key( contract=futures_contract_object, frequency=frequency ) - self.parquet.delete_data_given_data_type_and_identifier(data_type=CONTRACT_COLLECTION, identifier=ident) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=CONTRACT_COLLECTION, identifier=ident + ) log.debug( "Deleted all prices for %s from %s" % (futures_contract_object.key, str(self)) @@ -205,7 +216,9 @@ def from_key_to_freq_and_contract(keyname) -> Tuple[Frequency, futuresContract]: return frequency, futures_contract -def from_contract_and_freq_to_key(contract: futuresContract, frequency: Frequency) -> str: +def from_contract_and_freq_to_key( + contract: futuresContract, frequency: Frequency +) -> str: if frequency is MIXED_FREQ: frequency_str = "" else: diff --git a/sysdata/parquet/parquet_historic_contract_positions.py b/sysdata/parquet/parquet_historic_contract_positions.py index b2cc6c7537..2e1ea4652e 100644 --- a/sysdata/parquet/parquet_historic_contract_positions.py +++ b/sysdata/parquet/parquet_historic_contract_positions.py @@ -1,4 +1,3 @@ - import pandas as pd from sysobjects.contracts import futuresContract, listOfFuturesContracts @@ -14,8 +13,11 @@ class parquetContractPositionData(contractPositionData): - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetContractPositionData")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetContractPositionData"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -35,20 +37,28 @@ def _write_updated_position_series_for_contract_object( updated_data_as_df = pd.DataFrame(updated_series) updated_data_as_df.columns = ["position"] - self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_data_as_df, identifier=ident, data_type=CONTRACT_POSITION_COLLECTION) + self.parquet.write_data_given_data_type_and_identifier( + data_to_write=updated_data_as_df, + identifier=ident, + data_type=CONTRACT_POSITION_COLLECTION, + ) def _delete_position_series_for_contract_object_without_checking( self, contract_object: futuresContract ): ident = from_contract_to_key(contract_object) - self.parquet.delete_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=ident) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=CONTRACT_POSITION_COLLECTION, identifier=ident + ) def get_position_as_series_for_contract_object( self, contract_object: futuresContract ) -> pd.Series: keyname = from_contract_to_key(contract_object) try: - pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=CONTRACT_POSITION_COLLECTION, identifier=keyname) + pd_df = self.parquet.read_data_given_data_type_and_identifier( + data_type=CONTRACT_POSITION_COLLECTION, identifier=keyname + ) except: raise missingData @@ -56,16 +66,18 @@ def get_position_as_series_for_contract_object( def get_list_of_contracts(self) -> listOfFuturesContracts: ## doesn't remove zero positions - list_of_keys = self.parquet.get_all_identifiers_with_data_type(data_type=CONTRACT_POSITION_COLLECTION) - list_of_futures_contract = [ - from_key_to_contract(key) for key in list_of_keys - ] + list_of_keys = self.parquet.get_all_identifiers_with_data_type( + data_type=CONTRACT_POSITION_COLLECTION + ) + list_of_futures_contract = [from_key_to_contract(key) for key in list_of_keys] return listOfFuturesContracts(list_of_futures_contract) + def from_contract_to_key(contract: futuresContract) -> str: - return str(contract.instrument_code)+"#"+str(contract.contract_date) + return str(contract.instrument_code) + "#" + str(contract.contract_date) + def from_key_to_contract(key: str) -> futuresContract: [instrument_code, contract_date] = key.split("#") - return futuresContract(instrument_code, contract_date) \ No newline at end of file + return futuresContract(instrument_code, contract_date) diff --git a/sysdata/parquet/parquet_historic_strategy_positions.py b/sysdata/parquet/parquet_historic_strategy_positions.py index f016046efc..dd19d8ddb3 100644 --- a/sysdata/parquet/parquet_historic_strategy_positions.py +++ b/sysdata/parquet/parquet_historic_strategy_positions.py @@ -14,8 +14,11 @@ class parquetStrategyPositionData(strategyPositionData): - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetStrategyPositionData")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetStrategyPositionData"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -28,7 +31,9 @@ def parquet(self): return self._parquet def get_list_of_instrument_strategies(self) -> listOfInstrumentStrategies: - list_of_keys = self.parquet.get_all_identifiers_with_data_type(data_type=STRATEGY_POSITION_COLLECTION) + list_of_keys = self.parquet.get_all_identifiers_with_data_type( + data_type=STRATEGY_POSITION_COLLECTION + ) list_of_instrument_strategies = [ instrumentStrategy.from_key(key) for key in list_of_keys ] @@ -38,26 +43,32 @@ def get_list_of_instrument_strategies(self) -> listOfInstrumentStrategies: def _write_updated_position_series_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy, updated_series: pd.Series ): - ident = instrument_strategy.key updated_data_as_df = pd.DataFrame(updated_series) updated_data_as_df.columns = ["position"] - self.parquet.write_data_given_data_type_and_identifier(data_to_write=updated_data_as_df, identifier=ident, data_type=STRATEGY_POSITION_COLLECTION) + self.parquet.write_data_given_data_type_and_identifier( + data_to_write=updated_data_as_df, + identifier=ident, + data_type=STRATEGY_POSITION_COLLECTION, + ) def _delete_position_series_for_instrument_strategy_object_without_checking( self, instrument_strategy: instrumentStrategy ): ident = instrument_strategy.key - self.parquet.delete_data_given_data_type_and_identifier(data_type=STRATEGY_POSITION_COLLECTION, identifier=ident) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=STRATEGY_POSITION_COLLECTION, identifier=ident + ) def get_position_as_series_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy ) -> pd.Series: - keyname = instrument_strategy.key try: - pd_df = self.parquet.read_data_given_data_type_and_identifier(data_type=STRATEGY_POSITION_COLLECTION, identifier=keyname) + pd_df = self.parquet.read_data_given_data_type_and_identifier( + data_type=STRATEGY_POSITION_COLLECTION, identifier=keyname + ) except: raise missingData diff --git a/sysdata/parquet/parquet_multiple_prices.py b/sysdata/parquet/parquet_multiple_prices.py index b8e9b50b28..fd98d1be21 100644 --- a/sysdata/parquet/parquet_multiple_prices.py +++ b/sysdata/parquet/parquet_multiple_prices.py @@ -23,9 +23,10 @@ class parquetFuturesMultiplePricesData(futuresMultiplePricesData): """ def __init__( - self,parquet_access: ParquetAccess, log=get_logger("parquetFuturesMultiplePricesData") + self, + parquet_access: ParquetAccess, + log=get_logger("parquetFuturesMultiplePricesData"), ): - super().__init__(log=log) self._parquet = parquet_access @@ -37,20 +38,25 @@ def parquet(self): return self._parquet def get_list_of_instruments(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=MULTIPLE_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=MULTIPLE_COLLECTION + ) def _get_multiple_prices_without_checking( self, instrument_code: str ) -> futuresMultiplePrices: - data = self.parquet.read_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code) + data = self.parquet.read_data_given_data_type_and_identifier( + data_type=MULTIPLE_COLLECTION, identifier=instrument_code + ) return futuresMultiplePrices(data) def _delete_multiple_prices_without_any_warning_be_careful( self, instrument_code: str ): - - self.parquet.delete_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=MULTIPLE_COLLECTION, identifier=instrument_code + ) self.log.debug( "Deleted multiple prices for %s from %s" % (instrument_code, str(self)) ) @@ -58,11 +64,14 @@ def _delete_multiple_prices_without_any_warning_be_careful( def _add_multiple_prices_without_checking_for_existing_entry( self, instrument_code: str, multiple_price_data_object: futuresMultiplePrices ): - multiple_price_data_aspd = pd.DataFrame(multiple_price_data_object) multiple_price_data_aspd = _change_contracts_to_str(multiple_price_data_aspd) - self.parquet.write_data_given_data_type_and_identifier(data_type=MULTIPLE_COLLECTION, identifier=instrument_code, data_to_write=multiple_price_data_aspd) + self.parquet.write_data_given_data_type_and_identifier( + data_type=MULTIPLE_COLLECTION, + identifier=instrument_code, + data_to_write=multiple_price_data_aspd, + ) self.log.debug( "Wrote %s lines of prices for %s to %s" % (len(multiple_price_data_aspd), instrument_code, str(self)), diff --git a/sysdata/parquet/parquet_optimal_positions.py b/sysdata/parquet/parquet_optimal_positions.py index c9764f43d4..5837481cbd 100644 --- a/sysdata/parquet/parquet_optimal_positions.py +++ b/sysdata/parquet/parquet_optimal_positions.py @@ -1,4 +1,3 @@ - from syscore.exceptions import missingData from sysdata.parquet.parquet_access import ParquetAccess from sysdata.production.optimal_positions import optimalPositionData @@ -15,8 +14,11 @@ class parquetOptimalPositionData(optimalPositionData): - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetOptimalPositionData")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetOptimalPositionData"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -30,8 +32,11 @@ def parquet(self): def get_list_of_instrument_strategies_with_optimal_position( self, ) -> listOfInstrumentStrategies: - - raw_list_of_instrument_strategies = self.parquet.get_all_identifiers_with_data_type(data_type=OPTIMAL_POSITION_COLLECTION) + raw_list_of_instrument_strategies = ( + self.parquet.get_all_identifiers_with_data_type( + data_type=OPTIMAL_POSITION_COLLECTION + ) + ) list_of_instrument_strategies = [ instrumentStrategy.from_key(key) for key in raw_list_of_instrument_strategies @@ -42,10 +47,11 @@ def get_list_of_instrument_strategies_with_optimal_position( def get_optimal_position_as_df_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> pd.DataFrame: - try: ident = instrument_strategy.key - df_result = self.parquet.read_data_given_data_type_and_identifier(data_type=OPTIMAL_POSITION_COLLECTION, identifier=ident) + df_result = self.parquet.read_data_given_data_type_and_identifier( + data_type=OPTIMAL_POSITION_COLLECTION, identifier=ident + ) except: raise missingData @@ -57,4 +63,8 @@ def write_optimal_position_as_df_for_instrument_strategy_without_checking( optimal_positions_as_df: pd.DataFrame, ): ident = instrument_strategy.key - self.parquet.write_data_given_data_type_and_identifier(data_type=OPTIMAL_POSITION_COLLECTION, identifier=ident, data_to_write=optimal_positions_as_df) + self.parquet.write_data_given_data_type_and_identifier( + data_type=OPTIMAL_POSITION_COLLECTION, + identifier=ident, + data_to_write=optimal_positions_as_df, + ) diff --git a/sysdata/parquet/parquet_spotfx_prices.py b/sysdata/parquet/parquet_spotfx_prices.py index d7deee9b07..aa597cbdc5 100644 --- a/sysdata/parquet/parquet_spotfx_prices.py +++ b/sysdata/parquet/parquet_spotfx_prices.py @@ -13,8 +13,9 @@ class parquetFxPricesData(fxPricesData): Class to read / write fx prices """ - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetFxPricesData")): - + def __init__( + self, parquet_access: ParquetAccess, log=get_logger("parquetFxPricesData") + ): super().__init__(log=log) self._parquet = parquet_access @@ -23,14 +24,17 @@ def parquet(self): return self._parquet def __repr__(self): - return 'parquetFxPricesData' + return "parquetFxPricesData" def get_list_of_fxcodes(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=SPOTFX_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=SPOTFX_COLLECTION + ) def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: - - fx_data = self.parquet.read_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + fx_data = self.parquet.read_data_given_data_type_and_identifier( + data_type=SPOTFX_COLLECTION, identifier=currency_code + ) fx_prices = fxPrices(fx_data[fx_data.columns[0]]) @@ -38,7 +42,9 @@ def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: def _delete_fx_prices_without_any_warning_be_careful(self, currency_code: str): log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - self.parquet.delete_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=SPOTFX_COLLECTION, identifier=currency_code + ) log.debug("Deleted fX prices for %s from %s" % (currency_code, str(self))) def _add_fx_prices_without_checking_for_existing_entry( @@ -50,7 +56,11 @@ def _add_fx_prices_without_checking_for_existing_entry( fx_price_data_aspd.columns = ["price"] fx_price_data_aspd = fx_price_data_aspd.astype(float) - self.parquet.write_data_given_data_type_and_identifier(data_type=SPOTFX_COLLECTION, identifier=currency_code, data_to_write=fx_price_data_aspd) + self.parquet.write_data_given_data_type_and_identifier( + data_type=SPOTFX_COLLECTION, + identifier=currency_code, + data_to_write=fx_price_data_aspd, + ) log.debug( "Wrote %s lines of prices for %s to %s" % (len(fx_price_data), currency_code, str(self)) diff --git a/sysdata/parquet/parquet_spreads.py b/sysdata/parquet/parquet_spreads.py index 1daa76b13a..033d537957 100644 --- a/sysdata/parquet/parquet_spreads.py +++ b/sysdata/parquet/parquet_spreads.py @@ -10,8 +10,11 @@ class parquetSpreadsForInstrumentData(spreadsForInstrumentData): - def __init__(self, parquet_access: ParquetAccess, log=get_logger("parquetSpreadsForInstrument")): - + def __init__( + self, + parquet_access: ParquetAccess, + log=get_logger("parquetSpreadsForInstrument"), + ): super().__init__(log=log) self._parquet = parquet_access @@ -24,19 +27,25 @@ def parquet(self): return self._parquet def get_list_of_instruments(self) -> list: - return self.parquet.get_all_identifiers_with_data_type(data_type=SPREAD_COLLECTION) + return self.parquet.get_all_identifiers_with_data_type( + data_type=SPREAD_COLLECTION + ) def _get_spreads_without_checking( self, instrument_code: str ) -> spreadsForInstrument: - data = self.parquet.read_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, identifier=instrument_code) + data = self.parquet.read_data_given_data_type_and_identifier( + data_type=SPREAD_COLLECTION, identifier=instrument_code + ) spreads = spreadsForInstrument(data[SPREAD_COLUMN_NAME]) return spreads def _delete_spreads_without_any_warning_be_careful(self, instrument_code: str): - self.parquet.delete_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, identifier=instrument_code) + self.parquet.delete_data_given_data_type_and_identifier( + data_type=SPREAD_COLLECTION, identifier=instrument_code + ) self.log.debug( "Deleted spreads for %s from %s" % (instrument_code, str(self)), instrument_code=instrument_code, @@ -48,7 +57,11 @@ def _add_spreads_without_checking_for_existing_entry( spreads_as_pd = pd.DataFrame(spreads) spreads_as_pd.columns = [SPREAD_COLUMN_NAME] spreads_as_pd = spreads_as_pd.astype(float) - self.parquet.write_data_given_data_type_and_identifier(data_type=SPREAD_COLLECTION, data_to_write=spreads_as_pd, identifier=instrument_code) + self.parquet.write_data_given_data_type_and_identifier( + data_type=SPREAD_COLLECTION, + data_to_write=spreads_as_pd, + identifier=instrument_code, + ) self.log.debug( "Wrote %s lines of spreads for %s to %s" % (len(spreads_as_pd), instrument_code, str(self)), diff --git a/sysdata/production/broker_client_id.py b/sysdata/production/broker_client_id.py index 1948b25d85..a9e086b25d 100644 --- a/sysdata/production/broker_client_id.py +++ b/sysdata/production/broker_client_id.py @@ -13,7 +13,6 @@ def __init__( idoffset: int = 0, log=get_logger("brokerClientIdTracker"), ): - super().__init__(log=log) self._idoffset = idoffset @@ -121,7 +120,6 @@ def get_next_id_from_current_list(current_list_of_ids: list, id_offset: int = 0) def get_next_id_from_current_list_and_full_set( current_list_of_ids: list, full_set_of_available_ids: set ) -> int: - unused_values = full_set_of_available_ids - set(current_list_of_ids) if len(unused_values) == 0: # no gaps, return the higest number plus 1 diff --git a/sysdata/production/capital.py b/sysdata/production/capital.py index 2cca790b67..2f68095538 100644 --- a/sysdata/production/capital.py +++ b/sysdata/production/capital.py @@ -132,7 +132,6 @@ def delete_recent_capital(self, last_date: datetime.datetime): ) def delete_all_global_capital(self, are_you_really_sure=False): - self.delete_all_capital_for_strategy( GLOBAL_CAPITAL_DICT_KEY, are_you_really_sure=are_you_really_sure ) @@ -144,7 +143,6 @@ def get_df_of_all_global_capital(self) -> pd.DataFrame: return capital_df def update_df_of_all_global_capital(self, updated_capital_series: pd.DataFrame): - ## ignore warning - for global we pass a Frame not a Series self.update_capital_pd_df_for_strategy( GLOBAL_CAPITAL_DICT_KEY, updated_capital_series @@ -190,7 +188,6 @@ def delete_recent_capital_for_strategy( def delete_all_capital_for_strategy( self, strategy_name: str, are_you_really_sure=False ): - if are_you_really_sure: self._delete_all_capital_for_strategy_no_checking(strategy_name) else: @@ -212,13 +209,11 @@ def get_capital_pd_df_for_strategy(self, strategy_name: str) -> pd.DataFrame: raise NotImplementedError def _delete_all_capital_for_strategy_no_checking(self, strategy_name: str): - raise NotImplementedError def update_capital_pd_df_for_strategy( self, strategy_name: str, updated_capital_df: pd.DataFrame ): - raise NotImplementedError @@ -326,7 +321,6 @@ def get_df_of_all_global_capital(self) -> pd.DataFrame: def update_and_return_total_capital_with_new_broker_account_value( self, broker_account_value: float, check_limit=0.1 ) -> float: - """ does everything you'd expect when a new broker account value arrives: - add on to broker account value series @@ -354,7 +348,6 @@ def update_and_return_total_capital_with_new_broker_account_value( def _init_capital_updater( self, new_broker_account_value: float ) -> totalCapitalUpdater: - calc_method = self.calc_method try: prev_broker_account_value = self._get_prev_broker_account_value() @@ -384,7 +377,6 @@ def _get_prev_broker_account_value(self) -> float: def _update_capital_data_after_pandl_event( self, capital_updater: totalCapitalUpdater ): - # Update broker account value and add p&l entry with synched dates date = datetime.datetime.now() diff --git a/sysdata/production/historic_contract_positions.py b/sysdata/production/historic_contract_positions.py index 78a5c25101..cfe628cd63 100644 --- a/sysdata/production/historic_contract_positions.py +++ b/sysdata/production/historic_contract_positions.py @@ -86,7 +86,6 @@ def get_list_of_contract_date_str_with_any_position_for_instrument_in_date_range start_date: datetime.datetime, end_date: datetime.datetime, ) -> List[str]: - list_of_contracts = self.get_list_of_contracts_for_instrument_code( instrument_code ) @@ -144,7 +143,6 @@ def any_positions_for_contract_in_date_range( start_date: datetime.datetime, end_date: datetime.datetime, ) -> bool: - try: series_of_positions = self.get_position_as_series_for_contract_object( contract @@ -188,7 +186,6 @@ def _update_position_for_contract_object_with_date_and_existing_data( current_series: pd.Series, new_position_series: pd.Series, ): - try: assert new_position_series.index[0] > current_series.index[-1] except: diff --git a/sysdata/production/historic_orders.py b/sysdata/production/historic_orders.py index fd672b3eab..8d26efedcf 100644 --- a/sysdata/production/historic_orders.py +++ b/sysdata/production/historic_orders.py @@ -64,7 +64,6 @@ def get_list_of_order_ids_in_date_range( period_start: datetime.datetime, period_end: datetime.datetime = arg_not_supplied, ) -> list: - raise NotImplementedError @@ -104,7 +103,6 @@ def get_list_of_orders_for_instrument_strategy( def get_list_of_order_ids_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ): - raise NotImplementedError diff --git a/sysdata/production/historic_strategy_positions.py b/sysdata/production/historic_strategy_positions.py index 75d1909d7e..6337ed2fd9 100644 --- a/sysdata/production/historic_strategy_positions.py +++ b/sysdata/production/historic_strategy_positions.py @@ -29,7 +29,6 @@ def __repr__(self): def get_current_position_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy ) -> int: - try: position_series = ( self.get_position_as_series_for_instrument_strategy_object( @@ -68,7 +67,6 @@ def update_position_for_instrument_strategy_object( def get_list_of_strategies_and_instruments_with_positions( self, ignore_zero_positions: bool = True ) -> listOfInstrumentStrategies: - list_of_instrument_strategies = self.get_list_of_instrument_strategies() if ignore_zero_positions: @@ -90,7 +88,6 @@ def get_list_of_strategies_and_instruments_with_positions( def get_list_of_instruments_for_strategy_with_position( self, strategy_name, ignore_zero_positions=True ) -> list: - list_of_instrument_strategies = ( self.get_list_of_strategies_and_instruments_with_positions( ignore_zero_positions=ignore_zero_positions @@ -192,7 +189,6 @@ def _update_position_for_instrument_strategy_object_with_date( position: int, date_index: datetime.datetime, ): - new_position_series = pd.Series([position], index=[date_index]) try: @@ -218,7 +214,6 @@ def _update_position_for_instrument_strategy_object_with_date_and_existing_data( current_series: pd.Series, new_position_series: pd.Series, ): - try: assert new_position_series.index[0] > current_series.index[-1] except: @@ -254,5 +249,4 @@ def get_list_of_instrument_strategies(self) -> listOfInstrumentStrategies: def get_position_as_series_for_instrument_strategy_object( self, instrument_strategy: instrumentStrategy ) -> pd.Series: - raise NotImplementedError diff --git a/sysdata/production/margin.py b/sysdata/production/margin.py index 7c44c1072a..daee162e24 100644 --- a/sysdata/production/margin.py +++ b/sysdata/production/margin.py @@ -13,7 +13,6 @@ def final_value(self) -> float: return self.values[-1] def add_value(self, value: float, dateref=datetime.datetime.now()): - return seriesOfMargin(self.append(pd.Series([value], index=[dateref]))) diff --git a/sysdata/production/optimal_positions.py b/sysdata/production/optimal_positions.py index af94238d35..88230f2652 100644 --- a/sysdata/production/optimal_positions.py +++ b/sysdata/production/optimal_positions.py @@ -70,7 +70,6 @@ def get_list_of_optimal_positions( def get_list_of_optimal_positions_given_list_of_instrument_strategies( self, list_of_instrument_strategies: listOfInstrumentStrategies ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - list_of_optimal_positions_and_instrument_strategies = [ self.get_instrument_strategy_and_optimal_position(instrument_strategy) for instrument_strategy in list_of_instrument_strategies @@ -87,7 +86,6 @@ def get_list_of_optimal_positions_given_list_of_instrument_strategies( def get_instrument_strategy_and_optimal_position( self, instrument_strategy: instrumentStrategy ) -> instrumentStrategyAndOptimalPosition: - optimal_position = self.get_current_optimal_position_for_instrument_strategy( instrument_strategy ) @@ -100,7 +98,6 @@ def get_instrument_strategy_and_optimal_position( def get_list_of_instruments_for_strategy_with_optimal_position( self, strategy_name: str ) -> list: - list_of_instrument_strategies = ( self.get_list_of_instrument_strategies_with_optimal_position() ) @@ -125,7 +122,6 @@ def list_of_strategies_with_optimal_position(self) -> list: def get_list_of_instrument_strategies_for_strategy_with_optimal_position( self, strategy_name: str ) -> listOfInstrumentStrategies: - list_of_instrument_strategies = ( self.get_list_of_instrument_strategies_with_optimal_position() ) @@ -138,7 +134,6 @@ def get_list_of_instrument_strategies_for_strategy_with_optimal_position( def get_current_optimal_position_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> baseOptimalPosition: - existing_optimal_positions_as_df = ( self.get_optimal_position_as_df_for_instrument_strategy(instrument_strategy) ) @@ -177,13 +172,11 @@ def update_optimal_position_for_instrument_strategy( def get_list_of_instrument_strategies_with_optimal_position( self, ) -> listOfInstrumentStrategies: - raise NotImplementedError def get_optimal_position_as_df_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> pd.DataFrame: - raise NotImplementedError def write_optimal_position_as_df_for_instrument_strategy_without_checking( @@ -191,5 +184,4 @@ def write_optimal_position_as_df_for_instrument_strategy_without_checking( instrument_strategy: instrumentStrategy, optimal_positions_as_df: pd.DataFrame, ) -> pd.DataFrame: - raise NotImplementedError diff --git a/sysdata/production/position_limits.py b/sysdata/production/position_limits.py index 2fd4765b20..f0026d1640 100644 --- a/sysdata/production/position_limits.py +++ b/sysdata/production/position_limits.py @@ -27,7 +27,6 @@ def __init__(self, log=get_logger("Overrides")): def get_position_limit_object_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> positionLimitForStrategyInstrument: - try: position_limit = self._get_abs_position_limit_for_instrument_strategy( instrument_strategy @@ -46,7 +45,6 @@ def get_position_limit_object_for_instrument_strategy( def get_position_limit_object_for_instrument( self, instrument_code: str ) -> positionLimitForInstrument: - try: position_limit = self._get_abs_position_limit_for_instrument( instrument_code @@ -79,7 +77,6 @@ def get_all_instruments_with_limits(self) -> list: raise NotImplementedError def get_all_instrument_strategies_with_limits(self) -> listOfInstrumentStrategies: - raise NotImplementedError def set_position_limit_for_instrument_strategy( @@ -98,5 +95,4 @@ def delete_position_limit_for_instrument_strategy( raise NotImplementedError def delete_position_limit_for_instrument(self, instrument_code: str): - raise NotImplementedError diff --git a/sysdata/production/process_control_data.py b/sysdata/production/process_control_data.py index ec2615c642..256af47e08 100644 --- a/sysdata/production/process_control_data.py +++ b/sysdata/production/process_control_data.py @@ -101,7 +101,6 @@ def check_if_should_pause_process(self, process_name: str) -> bool: return result def check_if_pid_running_and_if_not_finish_all_processes(self): - list_of_names = self.get_list_of_process_names() list_of_results = [ self.check_if_pid_running_and_if_not_finish(process_name) @@ -123,7 +122,6 @@ def check_if_pid_running_and_if_not_finish(self, process_name: str): self._update_control_for_process_name(process_name, original_process) def finish_all_processes(self): - list_of_names = self.get_list_of_process_names() for process_name in list_of_names: try: diff --git a/sysdata/production/trade_limits.py b/sysdata/production/trade_limits.py index 600008de3f..2de61ffec7 100644 --- a/sysdata/production/trade_limits.py +++ b/sysdata/production/trade_limits.py @@ -158,7 +158,6 @@ def reset_instrument_limit(self, instrument_code: str, period_days: int): def reset_strategy_limit_all_instruments( self, strategy_name: str, period_days: int ): - pass def reset_instrument_strategy_limit( @@ -190,7 +189,6 @@ def _get_trade_limit_object_from_isd_key( def _get_trade_limit_object( self, instrument_strategy: instrumentStrategy, period_days: int ) -> tradeLimit: - try: trade_limit_as_dict = self._get_trade_limit_as_dict_or_missing_data( instrument_strategy, period_days diff --git a/sysdata/sim/csv_futures_sim_data.py b/sysdata/sim/csv_futures_sim_data.py index d1fb54ae65..8fb1c6a2f1 100755 --- a/sysdata/sim/csv_futures_sim_data.py +++ b/sysdata/sim/csv_futures_sim_data.py @@ -25,7 +25,6 @@ class csvFuturesSimData(genericBlobUsingFuturesSimData): def __init__( self, csv_data_paths=arg_not_supplied, log=get_logger("csvFuturesSimData") ): - data = dataBlob( log=log, csv_data_paths=csv_data_paths, diff --git a/sysdata/sim/csv_futures_sim_test_data.py b/sysdata/sim/csv_futures_sim_test_data.py index 5d7dca4903..9dec5232fe 100644 --- a/sysdata/sim/csv_futures_sim_test_data.py +++ b/sysdata/sim/csv_futures_sim_test_data.py @@ -32,7 +32,6 @@ class CsvFuturesSimTestData(genericBlobUsingFuturesSimData): def __init__( self, start_date=None, end_date=None, log=get_logger("csvFuturesSimTestData") ): - data = dataBlob( log=log, csv_data_paths=dict( diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 4f7e971c45..8ebaf92835 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -28,7 +28,6 @@ class dbFuturesSimData(genericBlobUsingFuturesSimData): def __init__( self, data: dataBlob = arg_not_supplied, log=get_logger("dbFuturesSimData") ): - if data is arg_not_supplied: data = dataBlob( log=log, @@ -38,7 +37,7 @@ def __init__( get_class_for_data_type(FX_DATA), get_class_for_data_type(FUTURES_INSTRUMENT_DATA), get_class_for_data_type(ROLL_PARAMETERS_DATA), - get_class_for_data_type(STORED_SPREAD_DATA) + get_class_for_data_type(STORED_SPREAD_DATA), ], ) @@ -58,23 +57,21 @@ def __repr__(self): FUTURES_INSTRUMENT_DATA = "futures_instrument_data" STORED_SPREAD_DATA = "stored_spread_data" -def get_class_for_data_type(data_type:str): +def get_class_for_data_type(data_type: str): return use_sim_classes[data_type] + use_sim_classes = { FX_DATA: parquetFxPricesData, ROLL_PARAMETERS_DATA: csvRollParametersData, FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, - FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, - STORED_SPREAD_DATA: mongoSpreadCostData + STORED_SPREAD_DATA: mongoSpreadCostData, } - - if __name__ == "__main__": import doctest diff --git a/sysdata/sim/sim_data.py b/sysdata/sim/sim_data.py index 0855134eff..3d30ba4458 100644 --- a/sysdata/sim/sim_data.py +++ b/sysdata/sim/sim_data.py @@ -295,7 +295,6 @@ def _get_fx_data_from_start_date( def _resolve_start_date(sim_data: simData): - try: config = _resolve_config(sim_data) except missingData: diff --git a/sysdata/tools/cleaner.py b/sysdata/tools/cleaner.py index 026b91aba3..c1d104340e 100644 --- a/sysdata/tools/cleaner.py +++ b/sysdata/tools/cleaner.py @@ -31,7 +31,6 @@ def apply_price_cleaning( cleaning_config=arg_not_supplied, daily_data: bool = True, ): - cleaning_config = get_config_for_price_filtering( data=data, cleaning_config=cleaning_config ) @@ -102,7 +101,6 @@ def apply_price_cleaning( def get_config_for_price_filtering( data: dataBlob, cleaning_config: priceFilterConfig = arg_not_supplied ) -> priceFilterConfig: - if cleaning_config is not arg_not_supplied: ## override return cleaning_config diff --git a/sysdata/tools/manual_price_checker.py b/sysdata/tools/manual_price_checker.py index 424fb5a8d5..4577989da5 100644 --- a/sysdata/tools/manual_price_checker.py +++ b/sysdata/tools/manual_price_checker.py @@ -54,7 +54,6 @@ def manual_price_checker( data_iterating = True while data_iterating: - if only_add_rows: merged_data_with_status = merge_newer_data_no_checks(old_data, new_data) else: diff --git a/sysexecution/algos/algo.py b/sysexecution/algos/algo.py index 86125e5a01..d5dbc83583 100644 --- a/sysexecution/algos/algo.py +++ b/sysexecution/algos/algo.py @@ -84,7 +84,6 @@ def get_and_submit_broker_order_for_contract_order( ticker_object: tickerObject = None, broker_account: str = arg_not_supplied, ): - log = contract_order.log_with_attributes(self.data.log) broker = self.data_broker.get_broker_name() @@ -195,7 +194,6 @@ def set_limit_price( input_limit_price: float = None, limit_price_from: str = limit_price_from_input, ) -> float: - assert limit_price_from in sources_of_limit_price if limit_price_from == limit_price_from_input: diff --git a/sysexecution/algos/algo_adaptive.py b/sysexecution/algos/algo_adaptive.py index 931621c259..f3867bfcd0 100644 --- a/sysexecution/algos/algo_adaptive.py +++ b/sysexecution/algos/algo_adaptive.py @@ -3,7 +3,6 @@ class algoAdaptiveMkt(algoMarket): - # Adaptive market orders should eventually execute, but might take a while # This allows re-using the market order trade management logic, but without timing out ORDER_TIME_OUT = float("inf") diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 1256f098f8..6732b28921 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -69,7 +69,6 @@ def submit_trade(self) -> orderWithControls: def manage_trade( self, placed_broker_order_with_controls: orderWithControls ) -> orderWithControls: - data = self.data placed_broker_order_with_controls = self.manage_live_trade( placed_broker_order_with_controls @@ -81,7 +80,6 @@ def manage_trade( return placed_broker_order_with_controls def prepare_and_submit_trade(self) -> orderWithControls: - data = self.data contract_order = self.contract_order log = contract_order.log_with_attributes(data.log) @@ -120,7 +118,6 @@ def prepare_and_submit_trade(self) -> orderWithControls: return missing_order if okay_to_do_limit_trade: - # create and issue limit order broker_order_with_controls = ( self.get_and_submit_broker_order_for_contract_order( @@ -146,7 +143,6 @@ def prepare_and_submit_trade(self) -> orderWithControls: def manage_live_trade( self, broker_order_with_controls_and_order_id: orderWithControls ) -> orderWithControls: - data = self.data log = broker_order_with_controls_and_order_id.order.log_with_attributes( data.log @@ -229,7 +225,6 @@ def limit_trade_viable( ticker_object: tickerObject, log: pst_logger, ) -> bool: - # no point doing limit order if we've got imbalanced size issues, as we'd # switch to aggressive immediately raise_adverse_size_issue = adverse_size_issue( @@ -242,7 +237,6 @@ def limit_trade_viable( # or if not enough time left if is_market_about_to_close(data_broker=data_broker, order=order, log=log): - log.debug( "Market about to close or stack handler nearly close - doing market order" ) @@ -267,7 +261,6 @@ def file_log_report( def file_log_report_limit_order( log, is_aggressive: bool, broker_order_with_controls: orderWithControls ): - if is_aggressive: agg_txt = "Aggressive" else: @@ -337,7 +330,6 @@ def is_market_about_to_close( order: Union[brokerOrder, contractOrder, orderWithControls], log: pst_logger, ) -> bool: - try: short_of_time = data_broker.less_than_N_hours_of_trading_left_for_contract( order.futures_contract, diff --git a/sysexecution/algos/allocate_algo_to_order.py b/sysexecution/algos/allocate_algo_to_order.py index 17ed31fbae..575f3273cf 100644 --- a/sysexecution/algos/allocate_algo_to_order.py +++ b/sysexecution/algos/allocate_algo_to_order.py @@ -80,7 +80,6 @@ def check_and_if_required_allocate_algo_to_single_contract_order( contract_order: contractOrder, instrument_order: instrumentOrder, ) -> contractOrder: - config = get_algo_allocation_config(data) log = contract_order.log_with_attributes(data.log) @@ -140,7 +139,6 @@ def already_has_algo_allocated(contract_order: contractOrder) -> bool: def algo_allocation_is_overriden_for_instrument( contract_order: contractOrder, config: AlgoConfig ) -> bool: - instrument_code = contract_order.instrument_code instruments_with_keys = list(config.algo_overrides.keys()) diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index a8411952d4..07e9a2294e 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -33,7 +33,6 @@ def post_trade_processing( def cancel_order( data: dataBlob, broker_order_with_controls: orderWithControls ) -> orderWithControls: - log = broker_order_with_controls.order.log_with_attributes(data.log) data_broker = dataBroker(data) data_broker.cancel_order_given_control_object(broker_order_with_controls) @@ -62,7 +61,6 @@ def set_limit_price( broker_order_with_controls: orderWithControls, new_limit_price: float, ): - log = broker_order_with_controls.order.log_with_attributes(data_broker.data.log) try: @@ -106,7 +104,6 @@ def check_current_limit_price_at_inside_spread( def file_log_report_market_order(log, broker_order_with_controls: orderWithControls): - ticker_object = broker_order_with_controls.ticker current_tick = str(ticker_object.current_tick()) diff --git a/sysexecution/order_stacks/broker_order_stack.py b/sysexecution/order_stacks/broker_order_stack.py index db6198bdb9..52a4de8925 100644 --- a/sysexecution/order_stacks/broker_order_stack.py +++ b/sysexecution/order_stacks/broker_order_stack.py @@ -53,7 +53,6 @@ def __init__( control_object, ticker_object: tickerObject = None, ): - self._order = broker_order self._control_object = control_object self._ticker = ticker_object diff --git a/sysexecution/order_stacks/contract_order_stack.py b/sysexecution/order_stacks/contract_order_stack.py index 77d27d1e3f..3d6704c6ed 100644 --- a/sysexecution/order_stacks/contract_order_stack.py +++ b/sysexecution/order_stacks/contract_order_stack.py @@ -43,7 +43,6 @@ def add_controlling_algo_ref(self, order_id: int, control_algo_ref: str): raise Exception(error_msg) def release_order_from_algo_control(self, order_id: int): - existing_order = self.get_order_with_id_from_stack(order_id) if existing_order is missing_order: error_msg = "Can't add controlling ago as order %d doesn't exist" % order_id diff --git a/sysexecution/order_stacks/instrument_order_stack.py b/sysexecution/order_stacks/instrument_order_stack.py index 0b44ff7255..d87e004b72 100644 --- a/sysexecution/order_stacks/instrument_order_stack.py +++ b/sysexecution/order_stacks/instrument_order_stack.py @@ -148,7 +148,6 @@ def _put_adjusting_order_on_stack( def calculate_adjusted_order_given_existing_orders( new_order: instrumentOrder, existing_orders: listOfOrders, log ): - desired_new_trade = new_order.trade ( existing_trades, diff --git a/sysexecution/order_stacks/order_stack.py b/sysexecution/order_stacks/order_stack.py index b53345dab7..88e17177ce 100644 --- a/sysexecution/order_stacks/order_stack.py +++ b/sysexecution/order_stacks/order_stack.py @@ -221,7 +221,6 @@ def is_completed( allow_zero_completions=False, treat_inactive_as_complete=False, ) -> bool: - existing_order = self.get_order_with_id_from_stack(order_id) if allow_zero_completions: @@ -295,7 +294,6 @@ def remove_children_from_order(self, order_id: int): self._change_order_on_stack(order_id, new_order) def mark_as_manual_fill_for_order_id(self, order_id: int): - order = self.get_order_with_id_from_stack(order_id) order.manual_fill = True self._change_order_on_stack(order_id, order) @@ -308,7 +306,6 @@ def change_fill_quantity_for_order( filled_price: float = None, fill_datetime: datetime.datetime = None, ): - existing_order = self.get_order_with_id_from_stack(order_id) if existing_order is missing_order: error_msg = "Can't apply fill to non existent order %d" % order_id @@ -503,7 +500,6 @@ def _get_list_of_orderids_with_same_tradeable_object_on_stack( def _get_list_of_order_ids_with_key_from_stack( self, order_key: str, exclude_inactive_orders: bool = True ) -> list: - all_order_ids = self.get_list_of_order_ids( exclude_inactive_orders=exclude_inactive_orders ) diff --git a/sysexecution/orders/base_orders.py b/sysexecution/orders/base_orders.py index 415cbd2d46..e178f01d53 100644 --- a/sysexecution/orders/base_orders.py +++ b/sysexecution/orders/base_orders.py @@ -158,7 +158,6 @@ def as_single_trade_qty_or_error(self) -> int: def replace_required_trade_size_only_use_for_unsubmitted_trades( self, new_trade: tradeQuantity ): - # ensure refactoring works assert type(new_trade) is tradeQuantity @@ -308,7 +307,6 @@ def change_trade_size_proportionally_to_meet_abs_qty_limit(self, max_abs_qty: in def reduce_trade_size_proportionally_so_smallest_leg_is_max_size( self, max_size: int ): - new_order = copy(self) old_trade = new_order.trade new_trade = ( @@ -486,7 +484,6 @@ def resolve_parent(parent: int): def resolve_multi_leg_price_to_single_price( trade_list: tradeQuantity, price_list: list ) -> float: - if len(price_list) == 0: ## This will be the case when an order is first created or has no fills return None diff --git a/sysexecution/orders/broker_orders.py b/sysexecution/orders/broker_orders.py index de0667e484..ff36b75f15 100644 --- a/sysexecution/orders/broker_orders.py +++ b/sysexecution/orders/broker_orders.py @@ -394,7 +394,6 @@ def create_new_broker_order_from_contract_order( broker_permid: str = "", broker_tempid: str = "", ) -> brokerOrder: - broker_order = brokerOrder( contract_order.key, contract_order.trade, @@ -428,7 +427,6 @@ def create_augemented_order( instrument_order: instrumentOrder, contract_order: contractOrder, ): - # Price when the trade was generated. We use the contract order price since # the instrument order price may refer to a different contract order.parent_reference_price = contract_order.reference_price diff --git a/sysexecution/orders/contract_orders.py b/sysexecution/orders/contract_orders.py index 83b37dd05c..b60262e0e9 100644 --- a/sysexecution/orders/contract_orders.py +++ b/sysexecution/orders/contract_orders.py @@ -328,7 +328,6 @@ def sort_inputs_by_contract_date_order(self): def from_contract_order_args_to_resolved_args( args: tuple, fill: tradeQuantity ) -> contractOrderKeyArguments: - # different ways of specifying tradeable object key_arguments = split_contract_order_args(args, fill) diff --git a/sysexecution/orders/list_of_orders.py b/sysexecution/orders/list_of_orders.py index 8f08b076e0..b778cef408 100644 --- a/sysexecution/orders/list_of_orders.py +++ b/sysexecution/orders/list_of_orders.py @@ -123,7 +123,6 @@ def all_zero_fills(self) -> bool: def calculate_most_conservative_trade_from_list_of_orders_with_limits_applied( position: int, original_order: Order, list_of_orders: listOfOrders ) -> Order: - list_of_trade_qty = list_of_orders.list_of_qty() new_trade_qty = ( diff --git a/sysexecution/stack_handler/additional_sampling.py b/sysexecution/stack_handler/additional_sampling.py index 65e69f0fb9..1e0e550e02 100644 --- a/sysexecution/stack_handler/additional_sampling.py +++ b/sysexecution/stack_handler/additional_sampling.py @@ -38,7 +38,6 @@ def _get_all_instruments(self): return instrument_list def refresh_sampling_for_contract(self, contract: futuresContract): - okay_to_sample = self.is_contract_currently_okay_to_sample(contract) if not okay_to_sample: return None @@ -74,7 +73,6 @@ def get_average_spread(self, contract: futuresContract) -> float: return average_spread def add_spread_data_to_db(self, contract: futuresContract, average_spread: float): - ## we store by instrument instrument_code = contract.instrument_code update_prices = self.update_prices diff --git a/sysexecution/stack_handler/balance_trades.py b/sysexecution/stack_handler/balance_trades.py index 3022898dbd..f5c40efad3 100644 --- a/sysexecution/stack_handler/balance_trades.py +++ b/sysexecution/stack_handler/balance_trades.py @@ -83,7 +83,6 @@ def put_balance_trades_on_stack( instrument_order_id, [contract_order_id] ) except Exception as e: - log.error("Couldn't add children to instrument order error %s" % str(e)) log.error("Rolling back") self.rollback_balance_trades( @@ -122,7 +121,6 @@ def put_balance_trades_on_stack( def rollback_balance_trades( self, instrument_order_id: int, contract_order_id: int, broker_order_id: int ): - if instrument_order_id is not missing_order: self.instrument_stack.remove_order_with_id_from_stack(instrument_order_id) if contract_order_id is not missing_order: diff --git a/sysexecution/stack_handler/cancel_and_modify.py b/sysexecution/stack_handler/cancel_and_modify.py index 1615544620..8c59e2611e 100644 --- a/sysexecution/stack_handler/cancel_and_modify.py +++ b/sysexecution/stack_handler/cancel_and_modify.py @@ -76,7 +76,6 @@ def cancel_broker_order_with_id_and_return_order( def are_all_orders_cancelled_after_timeout( self, list_of_broker_orders: listOfOrders, wait_time_seconds: int = 60 ) -> listOfOrders: - timer = quickTimer(wait_time_seconds) while timer.unfinished: list_of_broker_orders = self.list_of_orders_not_yet_cancelled( @@ -110,7 +109,6 @@ def list_of_orders_not_yet_cancelled( return new_list_of_orders def check_order_cancelled(self, broker_order: brokerOrder) -> bool: - data_broker = self.data_broker order_is_cancelled = data_broker.check_order_is_cancelled(broker_order) diff --git a/sysexecution/stack_handler/checks.py b/sysexecution/stack_handler/checks.py index f97836250d..ae3cafeefe 100644 --- a/sysexecution/stack_handler/checks.py +++ b/sysexecution/stack_handler/checks.py @@ -16,7 +16,6 @@ class stackHandlerChecks(stackHandlerCore): # We do these regularly, but also at the end of the day (daily reporting) def check_internal_position_break(self): - diag_positions = diagPositions(self.data) breaks = ( diag_positions.get_list_of_breaks_between_contract_and_strategy_positions() diff --git a/sysexecution/stack_handler/completed_orders.py b/sysexecution/stack_handler/completed_orders.py index 56f33bd95e..8c247b5ab5 100644 --- a/sysexecution/stack_handler/completed_orders.py +++ b/sysexecution/stack_handler/completed_orders.py @@ -62,7 +62,6 @@ def handle_completed_instrument_order( def get_order_family_for_instrument_order_id( self, instrument_order_id: int ) -> orderFamily: - instrument_order = self.instrument_stack.get_order_with_id_from_stack( instrument_order_id ) @@ -112,7 +111,6 @@ def confirm_all_children_and_grandchildren_are_filled( allow_zero_completions=False, treat_inactive_as_complete=True, ): - order_family = self.get_order_family_for_instrument_order_id( instrument_order_id ) @@ -164,7 +162,6 @@ def check_list_of_broker_orders_complete( allow_zero_completions=False, treat_inactive_as_complete=False, ): - for broker_order_id in list_of_broker_order_id: completely_filled = self.broker_stack.is_completed( broker_order_id, @@ -179,7 +176,6 @@ def check_list_of_broker_orders_complete( return True def add_order_family_to_historic_orders_database(self, order_family: orderFamily): - instrument_order = self.instrument_stack.get_order_with_id_from_stack( order_family.instrument_order_id ) @@ -197,7 +193,6 @@ def add_order_family_to_historic_orders_database(self, order_family: orderFamily ) def deactivate_family_of_orders(self, order_family: orderFamily): - # Make orders inactive # A subsequent process will delete them self.instrument_stack.deactivate_order(order_family.instrument_order_id) diff --git a/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py b/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py index 02b3fa39ef..26df6aa4a9 100644 --- a/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py +++ b/sysexecution/stack_handler/create_broker_orders_from_contract_orders.py @@ -39,11 +39,9 @@ def create_broker_orders_from_contract_orders(self): """ list_of_contract_order_ids = self.contract_stack.get_list_of_order_ids() for contract_order_id in list_of_contract_order_ids: - self.create_broker_order_for_contract_order(contract_order_id) def create_broker_order_for_contract_order(self, contract_order_id: int): - original_contract_order = self.contract_stack.get_order_with_id_from_stack( contract_order_id ) @@ -74,7 +72,6 @@ def create_broker_order_for_contract_order(self, contract_order_id: int): ) if algo_instance.blocking_algo_requires_management: - completed_broker_order_with_controls = algo_instance.manage_trade( broker_order_with_controls_and_order_id ) @@ -87,7 +84,6 @@ def create_broker_order_for_contract_order(self, contract_order_id: int): def preprocess_contract_order( self, original_contract_order: contractOrder ) -> contractOrder: - if original_contract_order is missing_order: # weird race condition return missing_order @@ -192,7 +188,6 @@ def apply_trade_limits_to_contract_order( def liquidity_size_contract_order( self, contract_order_after_trade_limits: contractOrder ) -> contractOrder: - data_broker = self.data_broker log = contract_order_after_trade_limits.log_with_attributes(self.log) @@ -222,7 +217,6 @@ def liquidity_size_contract_order( def send_to_algo( self, contract_order_to_trade: contractOrder ) -> (Algo, orderWithControls): - log = contract_order_to_trade.log_with_attributes(self.log) instrument_order = self.get_parent_of_contract_order(contract_order_to_trade) @@ -328,7 +322,6 @@ def add_trade_to_database( def post_trade_processing( self, completed_broker_order_with_controls: orderWithControls ): - broker_order = completed_broker_order_with_controls.order # update trade limits @@ -347,7 +340,6 @@ def post_trade_processing( ) def add_trade_to_trade_limits(self, executed_order: brokerOrder): - data_trade_limits = dataTradeLimits(self.data) data_trade_limits.add_trade(executed_order) diff --git a/sysexecution/stack_handler/fills.py b/sysexecution/stack_handler/fills.py index 2c93ac6766..4f76ebfd7b 100644 --- a/sysexecution/stack_handler/fills.py +++ b/sysexecution/stack_handler/fills.py @@ -40,7 +40,6 @@ def pass_fills_from_broker_to_broker_stack(self): self.apply_broker_fill_from_broker_to_broker_database(broker_order_id) def apply_broker_fill_from_broker_to_broker_database(self, broker_order_id: int): - db_broker_order = self.broker_stack.get_order_with_id_from_stack( broker_order_id ) @@ -72,7 +71,6 @@ def apply_broker_fill_from_broker_to_broker_database(self, broker_order_id: int) def apply_broker_order_fills_to_database( self, broker_order_id: int, broker_order: brokerOrder ): - # Turn commissions into floats data_broker = dataBroker(self.data) broker_order_with_commissions = ( @@ -163,7 +161,6 @@ def apply_fills_to_contract_order( filled_price: float, fill_datetime: datetime.datetime, ): - contract_order_id = contract_order_before_fill.order_id self.contract_stack.change_fill_quantity_for_order( contract_order_id, @@ -208,7 +205,6 @@ def apply_position_change_to_stored_contract_positions( ) def apply_contract_fill_to_instrument_order(self, contract_order_id: int): - contract_order = self.contract_stack.get_order_with_id_from_stack( contract_order_id ) @@ -233,7 +229,6 @@ def apply_contract_fill_to_instrument_order(self, contract_order_id: int): ) def apply_contract_fills_for_instrument_order(self, instrument_order_id: int): - instrument_order = self.instrument_stack.get_order_with_id_from_stack( instrument_order_id ) @@ -371,7 +366,6 @@ def fill_for_instrument_in_database( fill_price: float, fill_datetime: datetime.datetime, ): - # if fill has changed then update positions self.apply_position_change_to_instrument(original_instrument_order, fill_qty) @@ -407,7 +401,6 @@ def apply_position_change_to_instrument( def check_to_see_if_distributed_order( instrument_order: instrumentOrder, contract_orders: listOfOrders ) -> bool: - trade_instrument_order = instrument_order.trade trade_contract_orders = [order.trade for order in contract_orders] diff --git a/sysexecution/stack_handler/roll_orders.py b/sysexecution/stack_handler/roll_orders.py index e5297c0fe0..36fd6caaac 100644 --- a/sysexecution/stack_handler/roll_orders.py +++ b/sysexecution/stack_handler/roll_orders.py @@ -75,7 +75,6 @@ def generate_force_roll_orders_for_instrument_without_checking( ) def check_roll_required_and_safe(self, instrument_code: str) -> bool: - roll_orders_required_from_positions = ( self.check_if_positions_require_order_generation(instrument_code) ) @@ -139,7 +138,6 @@ def check_if_order_required_given_roll_state_is_appropriate( return order_generation_is_appropriate def check_if_order_required_in_double_sided_roll_state(self, instrument_code: str): - ## Double sided, so we will only do if there is no reducing order on the stack has_reducing_order = has_reducing_instrument_order_on_stack( data=self.data, @@ -202,7 +200,6 @@ def check_and_warn_if_order_for_instrument_already_on_contract_stack( def add_instrument_and_list_of_contract_orders_to_stack( self, instrument_order: instrumentOrder, list_of_contract_orders: listOfOrders ): - instrument_stack = self.instrument_stack contract_stack = self.contract_stack parent_log = instrument_order.log_with_attributes(self.log) @@ -472,17 +469,21 @@ def get_strategy_name_with_largest_position_for_instrument( ) try: - strategy_name = all_instrument_positions.strategy_name_with_largest_abs_position_for_instrument( - instrument_code - ) + strategy_name = all_instrument_positions.strategy_name_with_largest_abs_position_for_instrument( + instrument_code + ) except: - ## corner case where nets out to 0 - strategies = diag_positions.get_list_of_strategies_with_positions() - strategy_name = strategies[0] - data.log.debug("No strategies have net positions in %s, using arbitrary strategy %s" % (instrument_code, strategy_name)) + ## corner case where nets out to 0 + strategies = diag_positions.get_list_of_strategies_with_positions() + strategy_name = strategies[0] + data.log.debug( + "No strategies have net positions in %s, using arbitrary strategy %s" + % (instrument_code, strategy_name) + ) return strategy_name + def create_contract_roll_orders( data: dataBlob, roll_spread_info: rollSpreadInformation, @@ -540,7 +541,6 @@ def create_contract_orders_close_first_contract( def create_contract_orders_outright( roll_spread_info: rollSpreadInformation, ) -> listOfOrders: - strategy = ROLL_PSEUDO_STRATEGY first_order = contractOrder( @@ -568,7 +568,6 @@ def create_contract_orders_outright( def create_contract_orders_spread( roll_spread_info: rollSpreadInformation, ) -> listOfOrders: - strategy = ROLL_PSEUDO_STRATEGY contract_id_list = [ roll_spread_info.priced_contract_id, diff --git a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py index 968ef1a0c3..33716ee7e3 100644 --- a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py +++ b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py @@ -45,7 +45,6 @@ def spawn_children_from_new_instrument_orders(self): self.spawn_children_from_instrument_order_id(instrument_order_id) def spawn_children_from_instrument_order_id(self, instrument_order_id: int): - instrument_order = self.instrument_stack.get_order_with_id_from_stack( instrument_order_id ) @@ -84,7 +83,6 @@ def add_children_to_stack_and_child_id_to_parent( parent_order: Order, list_of_child_orders: listOfOrders, ): - parent_log = parent_order.log_with_attributes(self.log) list_of_child_ids = put_children_on_stack( @@ -315,7 +313,6 @@ def passive_roll_child_order( data: dataBlob, instrument_order: instrumentOrder, ) -> list: - log = instrument_order.log_with_attributes(data.log) diag_positions = diagPositions(data) instrument_code = instrument_order.instrument_code @@ -412,7 +409,6 @@ def passive_trade_split_over_two_contracts( def list_of_contract_orders_from_list_of_child_date_and_trade( instrument_order: instrumentOrder, list_of_child_contract_dates_and_trades: list ) -> listOfOrders: - list_of_contract_orders = [ contract_order_for_direct_instrument_child_date_and_trade( instrument_order, child_date_and_trade diff --git a/sysexecution/stack_handler/stackHandlerCore.py b/sysexecution/stack_handler/stackHandlerCore.py index 7f7c6b83ec..fef9e551c4 100644 --- a/sysexecution/stack_handler/stackHandlerCore.py +++ b/sysexecution/stack_handler/stackHandlerCore.py @@ -101,7 +101,6 @@ def put_children_on_stack( list_of_child_orders: listOfOrders, parent_log, ) -> list: - try: list_of_child_ids = child_stack.put_list_of_orders_on_stack( list_of_child_orders @@ -158,7 +157,6 @@ def log_successful_adding( parent_order: Order, parent_log, ): - for child_order, child_id in zip(list_of_child_orders, list_of_child_ids): child_log = child_order.log_with_attributes(parent_log) child_log.debug( @@ -175,7 +173,6 @@ def rollback_parents_and_children_and_handle_exceptions( parent_log, error_from_adding_child_orders: Exception, ): - ## try: rollback_parents_and_children( @@ -205,7 +202,6 @@ def rollback_parents_and_children( parent_order_id: int, list_of_child_order_ids: list, ): - ## parent order might be locked parent_stack.unlock_order_on_stack(parent_order_id) parent_stack.deactivate_order(parent_order_id) diff --git a/sysexecution/strategies/classic_buffered_positions.py b/sysexecution/strategies/classic_buffered_positions.py index 9bd0ff6613..2a10405c70 100644 --- a/sysexecution/strategies/classic_buffered_positions.py +++ b/sysexecution/strategies/classic_buffered_positions.py @@ -124,7 +124,6 @@ def list_of_trades_given_optimal_and_actual_positions( optimal_positions: optimalPositions, actual_positions: dict, ) -> listOfOrders: - upper_positions = optimal_positions.upper_positions list_of_instruments = upper_positions.keys() trade_list = [ @@ -146,7 +145,6 @@ def trade_given_optimal_and_actual_positions( optimal_positions: optimalPositions, actual_positions: dict, ) -> instrumentOrder: - upper_for_instrument = optimal_positions.upper_positions[instrument_code] lower_for_instrument = optimal_positions.lower_positions[instrument_code] actual_for_instrument = actual_positions.get(instrument_code, 0.0) diff --git a/sysexecution/strategies/dynamic_optimised_positions.py b/sysexecution/strategies/dynamic_optimised_positions.py index d70bae364e..ff1cb75e30 100644 --- a/sysexecution/strategies/dynamic_optimised_positions.py +++ b/sysexecution/strategies/dynamic_optimised_positions.py @@ -146,7 +146,6 @@ def calculate_optimised_positions_data( strategy_name: str, raw_optimal_position_data: dict, ) -> dict: - data_for_objective = get_data_for_objective_instance( data, strategy_name=strategy_name, @@ -204,7 +203,6 @@ def get_data_for_objective_instance( previous_positions: dict, raw_optimal_position_data: dict, ) -> dataForObjectiveInstance: - list_of_instruments = list(raw_optimal_position_data.keys()) data.log.debug("Getting data for optimisation") @@ -293,7 +291,6 @@ def get_data_for_objective_instance( def get_maximum_position_contracts( data, strategy_name: str, list_of_instruments: list ) -> portfolioWeights: - maximum_position_contracts = dict( [ ( @@ -315,7 +312,6 @@ def get_maximum_position_contracts( def get_maximum_position_contracts_for_instrument_strategy( data: dataBlob, instrument_strategy: instrumentStrategy ) -> int: - override = get_override_for_instrument_strategy(data, instrument_strategy) if override == CLOSE_OVERRIDE: return 0 @@ -336,7 +332,6 @@ def get_maximum_position_contracts_for_instrument_strategy( def get_per_contract_values( data: dataBlob, strategy_name: str, list_of_instruments: list ) -> portfolioWeights: - per_contract_values = portfolioWeights( [ ( @@ -358,7 +353,6 @@ def calculate_costs_per_portfolio_weight( strategy_name: str, list_of_instruments: list, ) -> meanEstimates: - costs = meanEstimates( [ ( @@ -383,7 +377,6 @@ def get_cost_per_notional_weight_as_proportion_of_capital( strategy_name: str, instrument_code: str, ) -> float: - capital = capital_for_strategy(data, strategy_name=strategy_name) cost_per_contract = get_cash_cost_in_base_for_instrument( @@ -423,7 +416,6 @@ def get_constraints(data, strategy_name: str, list_of_instruments: list): def get_no_trade_keys( data: dataBlob, strategy_name: str, list_of_instruments: list ) -> list: - no_trade_keys = [ instrument_code for instrument_code in list_of_instruments @@ -460,7 +452,6 @@ def get_reduce_only_keys( def get_override_for_instrument_strategy( data: dataBlob, instrument_strategy: instrumentStrategy ) -> Override: - diag_overrides = diagOverrides(data) override = diag_overrides.get_cumulative_override_for_instrument_strategy( instrument_strategy @@ -472,7 +463,6 @@ def get_override_for_instrument_strategy( def get_covariance_matrix_for_instrument_returns_for_optimisation( data: dataBlob, list_of_instruments: list ) -> covarianceEstimate: - corr_matrix = get_correlation_matrix_for_instrument_returns( data, list_of_instruments ) @@ -539,7 +529,6 @@ def get_config_parameters(data: dataBlob) -> dict: def get_objective_instance( data: dataBlob, data_for_objective: dataForObjectiveInstance ) -> objectiveFunctionForGreedy: - objective_function = objectiveFunctionForGreedy( log=data.log, contracts_optimal=data_for_objective.positions_optimal, @@ -559,7 +548,6 @@ def get_optimised_positions_data_dict_given_optimisation( data_for_objective: dataForObjectiveInstance, objective_function: objectiveFunctionForGreedy, ) -> dict: - optimised_positions = objective_function.optimise_positions() optimised_positions = optimised_positions.replace_weights_with_ints() @@ -605,7 +593,6 @@ def get_optimised_positions_data_dict_given_optimisation( def get_positions_given_weights( weights: portfolioWeights, per_contract_value: portfolioWeights ) -> portfolioWeights: - positions = weights / per_contract_value positions = positions.replace_weights_with_ints() @@ -615,7 +602,6 @@ def get_positions_given_weights( def get_weights_given_positions( positions: portfolioWeights, per_contract_value: portfolioWeights ) -> portfolioWeights: - weights = positions * per_contract_value return weights @@ -659,7 +645,6 @@ def get_optimal_position_entry_with_calcs_for_code( def write_optimised_positions_data( data: dataBlob, strategy_name: str, optimised_positions_data: dict ): - for instrument_code, optimised_position_entry in optimised_positions_data.items(): write_optimised_positions_data_for_code( data, @@ -675,7 +660,6 @@ def write_optimised_positions_data_for_code( instrument_code: str, optimised_position_entry: optimalPositionWithDynamicCalculations, ): - data_optimal_positions = dataOptimalPositions(data) instrument_strategy = instrumentStrategy( instrument_code=instrument_code, strategy_name=strategy_name @@ -696,7 +680,6 @@ def list_of_trades_given_optimised_and_actual_positions( optimised_positions_data: dict, current_positions: dict, ) -> listOfOrders: - list_of_instruments = optimised_positions_data.keys() trade_list = [ trade_given_optimal_and_actual_positions( @@ -721,7 +704,6 @@ def trade_given_optimal_and_actual_positions( optimised_position_entry: optimalPositionWithDynamicCalculations, current_position: int, ) -> instrumentOrder: - optimised_position = optimised_position_entry.optimised_position trade_required = optimised_position - current_position diff --git a/sysexecution/strategies/strategy_order_handling.py b/sysexecution/strategies/strategy_order_handling.py index 36ea397274..0bead21989 100644 --- a/sysexecution/strategies/strategy_order_handling.py +++ b/sysexecution/strategies/strategy_order_handling.py @@ -27,7 +27,6 @@ class orderGeneratorForStrategy(object): """ def __init__(self, data: dataBlob, strategy_name: str): - self._strategy_name = strategy_name self._data = data data_orders = dataOrders(data) @@ -86,7 +85,6 @@ def get_actual_positions_for_strategy(self) -> dict: def apply_overrides_and_position_limits( self, order_list: listOfOrders ) -> listOfOrders: - new_order_list = [ self.apply_overrides_and_position_limits_for_instrument_and_strategy( proposed_order @@ -148,7 +146,6 @@ def apply_overrides_for_instrument_and_strategy( def adjust_order_for_position_limits( self, order: instrumentOrder ) -> instrumentOrder: - log = order.log_with_attributes(self.log) data_position_limits = dataPositionLimits(self.data) diff --git a/sysexecution/tick_data.py b/sysexecution/tick_data.py index 0e3635a8c4..1721f83cf2 100644 --- a/sysexecution/tick_data.py +++ b/sysexecution/tick_data.py @@ -42,7 +42,6 @@ def analyse_tick_data_frame( forward_fill: bool = False, replace_qty_nans=False, ): - if tick_data.is_empty(): raise missingData("Tick data is empty") @@ -262,7 +261,6 @@ def analyse_for_tick( def wait_for_valid_bid_and_ask_and_analyse_current_tick( self, qty: int = arg_not_supplied, wait_time_seconds: int = 10 ) -> oneTick: - current_tick = self.wait_for_valid_bid_and_ask_and_return_current_tick( wait_time_seconds=wait_time_seconds ) @@ -403,7 +401,6 @@ def get_next_n_ticks_from_ticker_object( def from_list_of_ticks_to_dataframe( list_of_ticks: List[oneTick], ) -> dataFrameOfRecentTicks: - fields = TICK_REQUIRED_COLUMNS value_dict = {} diff --git a/sysinit/configtools/csvweights_to_yaml.py b/sysinit/configtools/csvweights_to_yaml.py index d740c2dda1..744e006195 100644 --- a/sysinit/configtools/csvweights_to_yaml.py +++ b/sysinit/configtools/csvweights_to_yaml.py @@ -54,7 +54,6 @@ def forecast_weights_by_instrument_csv_to_yaml(filename_input, filename_output): my_config = {} for instrument in data_instruments: - data_weights = data[instrument].values my_config[instrument] = dict( [ diff --git a/sysinit/futures/adjustedprices_from_db_multiple_to_db.py b/sysinit/futures/adjustedprices_from_db_multiple_to_db.py index e3e66b2e12..5805fa6686 100755 --- a/sysinit/futures/adjustedprices_from_db_multiple_to_db.py +++ b/sysinit/futures/adjustedprices_from_db_multiple_to_db.py @@ -13,6 +13,7 @@ diag_prices = diagPrices() + def _get_data_inputs(csv_adj_data_path): db_multiple_prices = diag_prices.db_futures_multiple_prices_data db_adjusted_prices = diag_prices.db_futures_adjusted_prices_data @@ -21,17 +22,28 @@ def _get_data_inputs(csv_adj_data_path): return db_multiple_prices, db_adjusted_prices, csv_adjusted_prices -def process_adjusted_prices_all_instruments(csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): +def process_adjusted_prices_all_instruments( + csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False +): db_multiple_prices, _notused, _alsonotused = _get_data_inputs(csv_adj_data_path) instrument_list = db_multiple_prices.get_list_of_instruments() for instrument_code in instrument_list: print(instrument_code) - process_adjusted_prices_single_instrument(instrument_code, csv_adj_data_path=csv_adj_data_path, - ADD_TO_DB=ADD_TO_DB, ADD_TO_CSV=ADD_TO_CSV) + process_adjusted_prices_single_instrument( + instrument_code, + csv_adj_data_path=csv_adj_data_path, + ADD_TO_DB=ADD_TO_DB, + ADD_TO_CSV=ADD_TO_CSV, + ) -def process_adjusted_prices_single_instrument(instrument_code, csv_adj_data_path=arg_not_supplied, - multiple_prices=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): +def process_adjusted_prices_single_instrument( + instrument_code, + csv_adj_data_path=arg_not_supplied, + multiple_prices=arg_not_supplied, + ADD_TO_DB=True, + ADD_TO_CSV=False, +): ( arctic_multiple_prices, parquet_adjusted_prices, @@ -60,4 +72,6 @@ def process_adjusted_prices_single_instrument(instrument_code, csv_adj_data_path if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort") # modify flags and datapath as required - process_adjusted_prices_all_instruments(csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=True) + process_adjusted_prices_all_instruments( + csv_adj_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=True + ) diff --git a/sysinit/futures/build_multiple_prices_from_raw_data.py b/sysinit/futures/build_multiple_prices_from_raw_data.py index d2fb828a15..de83d7d769 100644 --- a/sysinit/futures/build_multiple_prices_from_raw_data.py +++ b/sysinit/futures/build_multiple_prices_from_raw_data.py @@ -94,7 +94,6 @@ def _get_price_data_between_rolls( roll_calendar_with_roll_index: rollCalendarWithRollIndex, dict_of_futures_contract_closing_prices: dictFuturesContractFinalPrices, ): - # consider consolidating input args roll_date_info = _calc_roll_date_info(roll_calendar_with_roll_index) @@ -154,7 +153,6 @@ def _calc_contract_date_info( roll_date_info: rollDateInfo, dict_of_futures_contract_closing_prices: dictFuturesContractFinalPrices, ) -> contractAndPriceInfo: - roll_calendar = roll_date_info.roll_calendar_with_roll_index.roll_calendar contracts_now = roll_calendar.loc[roll_date_info.next_roll_date, :] @@ -194,7 +192,6 @@ def _invalid_current_contract(contract_date_info: contractAndPriceInfo) -> bool: def _calculate_price_data_from_current_next_carry_data( roll_date_info: rollDateInfo, contract_date_info: contractAndPriceInfo ): - set_of_price_data = _get_current_next_carry_data(roll_date_info, contract_date_info) all_price_data = _build_all_price_data(set_of_price_data, contract_date_info) @@ -241,7 +238,6 @@ def _get_next_price_data( next_contract_str = contract_date_info.next_contract_str if next_contract_str not in contract_keys: - if _last_row_in_roll_calendar(roll_date_info): # Last entry, this is fine print( @@ -281,7 +277,6 @@ def _get_carry_price_data( carry_contract_str = contract_date_info.carry_contract_str if carry_contract_str not in contract_keys: - if _last_row_in_roll_calendar(roll_date_info): # Last entry, this is fine print( @@ -319,7 +314,6 @@ def _last_row_in_roll_calendar(roll_date_info: rollDateInfo): def _build_all_price_data(set_of_price_data, contract_date_info): - current_price_data, next_price_data, carry_price_data = set_of_price_data all_price_data = pd.concat( [current_price_data, next_price_data, carry_price_data], axis=1 diff --git a/sysinit/futures/build_roll_calendars.py b/sysinit/futures/build_roll_calendars.py index f6108e192d..8ee559983d 100644 --- a/sysinit/futures/build_roll_calendars.py +++ b/sysinit/futures/build_roll_calendars.py @@ -91,7 +91,6 @@ def last_roll_date(self): def _create_approx_calendar_from_earliest_contract( earliest_contract_with_roll_data: contractWithRollParametersAndPrices, ) -> pd.DataFrame: - roll_calendar_as_list = _listOfRollCalendarRows() # On the roll date we stop holding the current contract, and end up holding the next one @@ -118,7 +117,6 @@ def _create_approx_calendar_from_earliest_contract( def _get_new_row_of_roll_calendar( current_contract: contractWithRollParametersAndPrices, ) -> (contractWithRollParametersAndPrices, _rollCalendarRow): - roll_parameters = current_contract.roll_parameters final_contract_date_str = current_contract.prices.last_contract_date_str() @@ -238,7 +236,6 @@ def adjust_to_price_series( def _get_local_data_for_row_number( approx_calendar: pd.DataFrame, row_number: int, idx_of_last_row_in_data: int ) -> localRowData: - last_row_in_data = row_number == idx_of_last_row_in_data if last_row_in_data: return _last_row @@ -247,9 +244,7 @@ def _get_local_data_for_row_number( approx_row = approx_calendar.iloc[row_number, :] if not first_row_in_data: - prev_approx_row = approx_calendar.iloc[ - row_number - 1, - ] + prev_approx_row = approx_calendar.iloc[row_number - 1,] else: prev_approx_row = _bad_row @@ -274,7 +269,6 @@ def _adjust_row_of_approx_roll_calendar( dict_of_futures_contract_prices: dictFuturesContractFinalPrices, omit_carry: bool = False, ): - roll_date, date_to_avoid = _get_roll_date_and_date_to_avoid(local_row_data) set_of_prices = _get_set_of_prices( local_row_data, dict_of_futures_contract_prices, omit_carry @@ -283,7 +277,6 @@ def _adjust_row_of_approx_roll_calendar( _print_roll_date_error(local_row_data) return _bad_row try: - adjusted_roll_date = _find_best_matching_roll_date( roll_date, set_of_prices, @@ -317,7 +310,6 @@ def _get_set_of_prices( dict_of_futures_contract_prices: dictFuturesContractFinalPrices, omit_carry: bool = False, ) -> setOfPrices: - approx_row = local_row_data.current_row current_contract = str(approx_row.current_contract) @@ -448,7 +440,6 @@ def _find_best_matching_roll_date( def _required_paired_prices(set_of_prices: setOfPrices) -> pd.DataFrame: - no_carry_exists = set_of_prices.carry_prices is _no_carry_prices no_curr_carry_exists = set_of_prices.curr_carry_prices is _no_carry_prices if no_carry_exists or no_curr_carry_exists: @@ -497,7 +488,6 @@ def _valid_dates_from_matching_prices(paired_prices_matching, avoid_date): def _find_closest_valid_date_to_approx_roll_date(valid_dates, roll_date): - distance_to_roll = valid_dates - roll_date distance_to_roll_days = [ abs(distance_item.days) for distance_item in distance_to_roll @@ -511,7 +501,6 @@ def _find_closest_valid_date_to_approx_roll_date(valid_dates, roll_date): def _get_adjusted_row( local_row_data: localRowData, adjusted_roll_date ) -> _rollCalendarRow: - approx_row = local_row_data.current_row current_carry_contract = approx_row.carry_contract current_contract = approx_row.current_contract @@ -584,7 +573,6 @@ def _add_carry_calendar( def back_out_roll_calendar_from_multiple_prices( multiple_prices: futuresMultiplePrices, ) -> pd.DataFrame: - multiple_prices_unique = multiple_prices[ ~multiple_prices.index.duplicated(keep="last") ] @@ -601,7 +589,6 @@ def back_out_roll_calendar_from_multiple_prices( def _get_roll_calendar_from_unique_prices( multiple_prices_unique: pd.DataFrame, ) -> pd.DataFrame: - tuple_of_roll_dates = _get_time_indices_from_multiple_prices(multiple_prices_unique) roll_calendar = _get_roll_calendar_from_roll_dates_and_unique_prices( multiple_prices_unique, tuple_of_roll_dates @@ -628,7 +615,6 @@ def _get_time_indices_from_multiple_prices( def _get_roll_calendar_from_roll_dates_and_unique_prices( multiple_prices_unique: pd.DataFrame, tuple_of_roll_dates: tuple ) -> pd.DataFrame: - roll_dates, days_before = tuple_of_roll_dates current_contracts = _extract_contract_from_multiple_prices( diff --git a/sysinit/futures/clone_data_for_instrument.py b/sysinit/futures/clone_data_for_instrument.py index 447a64c30e..a428a83d37 100644 --- a/sysinit/futures/clone_data_for_instrument.py +++ b/sysinit/futures/clone_data_for_instrument.py @@ -1,4 +1,3 @@ - from sysproduction.data.prices import diagPrices from sysdata.csv.csv_roll_calendars import csvRollCalendarData @@ -6,7 +5,6 @@ from sysdata.csv.csv_adjusted_prices import csvFuturesAdjustedPricesData - from sysobjects.contracts import futuresContract from syscore.dateutils import DAILY_PRICE_FREQ, HOURLY_FREQ from sysobjects.adjusted_prices import futuresAdjustedPrices @@ -29,7 +27,6 @@ def clone_data_for_instrument( offset: float = 0.0, ignore_duplication: bool = False, ): - clone_prices_per_contract( instrument_from, instrument_to, @@ -65,7 +62,6 @@ def clone_prices_per_contract( multiplier: float = 1.0, offset: float = 0.0, ): - if list_of_contract_dates is None: list_of_contract_dates = db_data_individual_prices.contract_dates_with_merged_price_data_for_instrument_code( instrument_from @@ -94,7 +90,6 @@ def clone_single_contract( multiplier: float = 1.0, offset: float = 0.0, ): - futures_contract_from = futuresContract(instrument_from, contract_date) futures_contract_to = futuresContract(instrument_to, contract_date) @@ -160,7 +155,6 @@ def clone_single_contract( def clone_roll_calendar(instrument_from: str, instrument_to: str): - roll_calendar = csv_roll_calendar.get_roll_calendar(instrument_from) csv_roll_calendar.add_roll_calendar(instrument_to, roll_calendar=roll_calendar) @@ -173,7 +167,6 @@ def clone_multiple_prices( inverse: bool = False, offset: float = 0.0, ): - prices = db_data_multiple_prices.get_multiple_prices(instrument_from) if inverse: prices = prices.inverse() @@ -197,7 +190,6 @@ def clone_adjusted_prices( inverse: bool = False, offset: float = 0.0, ): - prices = db_data_adjusted_prices.get_adjusted_prices(instrument_from) if inverse: prices = futuresAdjustedPrices(1 / prices) diff --git a/sysinit/futures/contract_prices_from_csv_to_arctic.py b/sysinit/futures/contract_prices_from_csv_to_arctic.py index ebc80df75e..4d489e8bae 100644 --- a/sysinit/futures/contract_prices_from_csv_to_arctic.py +++ b/sysinit/futures/contract_prices_from_csv_to_arctic.py @@ -6,6 +6,7 @@ diag_prices = diagPrices() + def init_db_with_csv_futures_contract_prices( datapath: str, csv_config=arg_not_supplied ): diff --git a/sysinit/futures/create_hourly_and_daily.py b/sysinit/futures/create_hourly_and_daily.py index 1d0fa8d2b5..e12c41c6e6 100644 --- a/sysinit/futures/create_hourly_and_daily.py +++ b/sysinit/futures/create_hourly_and_daily.py @@ -7,6 +7,7 @@ diag_prices = diagPrices() + def write_split_data_for_instrument(instrument_code): a = diag_prices.db_futures_contract_price_data list_of_contracts = a.contracts_with_merged_price_data_for_instrument_code( diff --git a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py index 4328689d1c..c334899994 100644 --- a/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py +++ b/sysinit/futures/multiple_and_adjusted_from_csv_to_arctic.py @@ -6,6 +6,7 @@ diag_prices = diagPrices() + def init_arctic_with_csv_futures_contract_prices( multiple_price_datapath=arg_not_supplied, adj_price_datapath=arg_not_supplied ): diff --git a/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py b/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py index 95fd700993..7a5a65610a 100755 --- a/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py +++ b/sysinit/futures/multipleprices_from_db_prices_and_csv_calendars_to_db.py @@ -31,6 +31,7 @@ diag_prices = diagPrices() + def _get_data_inputs(csv_roll_data_path, csv_multiple_data_path): csv_roll_calendars = csvRollCalendarData(csv_roll_data_path) db_individual_futures_prices = diag_prices.db_futures_contract_price_data @@ -45,9 +46,12 @@ def _get_data_inputs(csv_roll_data_path, csv_multiple_data_path): ) -def process_multiple_prices_all_instruments(csv_multiple_data_path=arg_not_supplied, - csv_roll_data_path=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): - +def process_multiple_prices_all_instruments( + csv_multiple_data_path=arg_not_supplied, + csv_roll_data_path=arg_not_supplied, + ADD_TO_DB=True, + ADD_TO_CSV=False, +): ( _not_used1, db_individual_futures_prices, @@ -60,16 +64,26 @@ def process_multiple_prices_all_instruments(csv_multiple_data_path=arg_not_suppl for instrument_code in instrument_list: print(instrument_code) - process_multiple_prices_single_instrument(instrument_code, csv_multiple_data_path=csv_multiple_data_path, - csv_roll_data_path=csv_roll_data_path, ADD_TO_DB=ADD_TO_DB, - ADD_TO_CSV=ADD_TO_CSV) - + process_multiple_prices_single_instrument( + instrument_code, + csv_multiple_data_path=csv_multiple_data_path, + csv_roll_data_path=csv_roll_data_path, + ADD_TO_DB=ADD_TO_DB, + ADD_TO_CSV=ADD_TO_CSV, + ) -def process_multiple_prices_single_instrument(instrument_code, target_instrument_code=arg_not_supplied, - adjust_calendar_to_prices=True, csv_multiple_data_path=arg_not_supplied, - csv_roll_data_path=arg_not_supplied, roll_parameters=arg_not_supplied, - roll_calendar=arg_not_supplied, ADD_TO_DB=True, ADD_TO_CSV=False): +def process_multiple_prices_single_instrument( + instrument_code, + target_instrument_code=arg_not_supplied, + adjust_calendar_to_prices=True, + csv_multiple_data_path=arg_not_supplied, + csv_roll_data_path=arg_not_supplied, + roll_parameters=arg_not_supplied, + roll_calendar=arg_not_supplied, + ADD_TO_DB=True, + ADD_TO_CSV=False, +): if target_instrument_code is arg_not_supplied: target_instrument_code = instrument_code ( @@ -80,9 +94,7 @@ def process_multiple_prices_single_instrument(instrument_code, target_instrument ) = _get_data_inputs(csv_roll_data_path, csv_multiple_data_path) dict_of_futures_contract_prices = ( - db_individual_futures_prices.get_merged_prices_for_instrument( - instrument_code - ) + db_individual_futures_prices.get_merged_prices_for_instrument(instrument_code) ) dict_of_futures_contract_closing_prices = ( dict_of_futures_contract_prices.final_prices() @@ -186,5 +198,7 @@ def add_phantom_row( csv_roll_data_path = arg_not_supplied # modify flags as required - process_multiple_prices_all_instruments(csv_multiple_data_path=csv_multiple_data_path, - csv_roll_data_path=csv_roll_data_path) + process_multiple_prices_all_instruments( + csv_multiple_data_path=csv_multiple_data_path, + csv_roll_data_path=csv_roll_data_path, + ) diff --git a/sysinit/futures/repocsv_spread_costs.py b/sysinit/futures/repocsv_spread_costs.py index 926d2c30f5..6708873940 100644 --- a/sysinit/futures/repocsv_spread_costs.py +++ b/sysinit/futures/repocsv_spread_costs.py @@ -43,7 +43,6 @@ def copy_spread_costs_from_csv_to_mongo(data: dataBlob): def process_new_instruments( data_in: spreadCostData, data_out: spreadCostData, new_instruments: list ): - if len(new_instruments) == 0: return None @@ -68,7 +67,6 @@ def process_new_instruments( def process_modified_instruments( data_in: spreadCostData, data_out: spreadCostData, modified_instruments: list ): - actually_modified_instruments = [] for instrument_code in modified_instruments: spread_for_instrument = data_in.get_spread_cost(instrument_code) @@ -105,7 +103,6 @@ def process_modified_instruments( def process_deleted_instruments(data_out: spreadCostData, deleted_instruments: list): - if len(deleted_instruments) == 0: return None diff --git a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py index dfafeb64f5..67d14bad6c 100755 --- a/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py +++ b/sysinit/futures/rollcalendars_from_arcticprices_to_csv.py @@ -7,7 +7,10 @@ from sysdata.csv.csv_roll_parameters import csvRollParametersData from sysdata.futures.rolls_parameters import rollParametersData from sysproduction.data.prices import get_valid_instrument_code_from_user, diagPrices -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + FUTURES_CONTRACT_PRICE_DATA, +) from sysdata.data_blob import dataBlob @@ -30,7 +33,6 @@ def build_and_write_roll_calendar( roll_parameters_data: rollParametersData = arg_not_supplied, roll_parameters: rollParameters = arg_not_supplied, ): - if output_datapath is arg_not_supplied: print( "*** WARNING *** This will overwrite the provided roll calendar. Might be better to use a temporary directory!" @@ -92,7 +94,6 @@ def build_and_write_roll_calendar( def check_saved_roll_calendar( instrument_code, input_datapath=arg_not_supplied, input_prices=arg_not_supplied ): - if input_datapath is None: print( "This will check the roll calendar in the default directory : are you are that's what you want to do?" diff --git a/sysinit/futures/safely_modify_roll_parameters.py b/sysinit/futures/safely_modify_roll_parameters.py index e95ba8b10d..29ddbf1902 100644 --- a/sysinit/futures/safely_modify_roll_parameters.py +++ b/sysinit/futures/safely_modify_roll_parameters.py @@ -55,12 +55,18 @@ def safely_modify_roll_parameters(data: dataBlob): print("Doing nothing") # return None - new_multiple_prices = process_multiple_prices_single_instrument(instrument_code=instrument_code, - csv_roll_data_path=output_path_for_temp_csv_files, - ADD_TO_DB=False, ADD_TO_CSV=False) - new_adjusted_prices = process_adjusted_prices_single_instrument(instrument_code, - multiple_prices=new_multiple_prices, - ADD_TO_DB=False, ADD_TO_CSV=False) + new_multiple_prices = process_multiple_prices_single_instrument( + instrument_code=instrument_code, + csv_roll_data_path=output_path_for_temp_csv_files, + ADD_TO_DB=False, + ADD_TO_CSV=False, + ) + new_adjusted_prices = process_adjusted_prices_single_instrument( + instrument_code, + multiple_prices=new_multiple_prices, + ADD_TO_DB=False, + ADD_TO_CSV=False, + ) diag_prices = diagPrices(data) existing_multiple_prices = diag_prices.get_multiple_prices(instrument_code) diff --git a/sysinit/futures/seed_price_data_from_IB.py b/sysinit/futures/seed_price_data_from_IB.py index 40716e2bb8..61cd521d8e 100644 --- a/sysinit/futures/seed_price_data_from_IB.py +++ b/sysinit/futures/seed_price_data_from_IB.py @@ -50,7 +50,6 @@ def seed_price_data_for_contract(data: dataBlob, contract_object: futuresContrac def seed_price_data_for_contract_at_frequency( data: dataBlob, contract_object: futuresContract, frequency: Frequency ): - data_broker = dataBroker(data) update_prices = updatePrices(data) log = contract_object.specific_log(data.log) diff --git a/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py b/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py index dfc74d0c41..d83965e7b9 100644 --- a/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py +++ b/sysinit/futures/spotfx_from_csvAndInvestingDotCom_to_db.py @@ -16,7 +16,9 @@ ) -def spotfx_from_csv_and_investing_dot_com(datapath, ADD_TO_DB=True, ADD_TO_CSV=True, ADD_EXTRA_DATA=True): +def spotfx_from_csv_and_investing_dot_com( + datapath, ADD_TO_DB=True, ADD_TO_CSV=True, ADD_EXTRA_DATA=True +): # You can adapt this for different providers by changing these parameters if ADD_EXTRA_DATA: investingDotCom_csv_fx_prices = csvFxPricesData( @@ -27,7 +29,6 @@ def spotfx_from_csv_and_investing_dot_com(datapath, ADD_TO_DB=True, ADD_TO_CSV=T list_of_ccy_codes = my_csv_fx_prices_data.get_list_of_fxcodes() for currency_code in list_of_ccy_codes: - print(currency_code) fx_prices_my_csv = my_csv_fx_prices_data.get_fx_prices(currency_code) diff --git a/sysinit/futures/strategy_transfer.py b/sysinit/futures/strategy_transfer.py index 3f6118144a..8fbc6f203e 100644 --- a/sysinit/futures/strategy_transfer.py +++ b/sysinit/futures/strategy_transfer.py @@ -13,7 +13,6 @@ def transfer_positions_between_strategies( old_strategy: str, new_strategy: str, instruments_to_transfer=arg_not_supplied ): - data = dataBlob() old_positions = get_old_strategy_positions(data, old_strategy) if instruments_to_transfer is arg_not_supplied: @@ -47,7 +46,6 @@ def transfer_position_instrument( instrument_code: str, old_positions: dict, ): - current_position = old_positions[instrument_code] filled_price = get_last_price(data, instrument_code) balance_trade( diff --git a/sysinit/futures/tests/test_sysinit_futures.py b/sysinit/futures/tests/test_sysinit_futures.py index 9889a9f3f7..13f8e7640f 100644 --- a/sysinit/futures/tests/test_sysinit_futures.py +++ b/sysinit/futures/tests/test_sysinit_futures.py @@ -9,7 +9,6 @@ class TestFuturesInit: - csv_config = ConfigCsvFuturesPrices( input_date_index_name="Time", input_skiprows=0, diff --git a/sysinit/transfer/backup_arctic_to_parquet.py b/sysinit/transfer/backup_arctic_to_parquet.py index 661175a819..27cf71a7ee 100644 --- a/sysinit/transfer/backup_arctic_to_parquet.py +++ b/sysinit/transfer/backup_arctic_to_parquet.py @@ -11,13 +11,19 @@ from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_capital import parquetCapitalData -from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData +from sysdata.parquet.parquet_futures_per_contract_prices import ( + parquetFuturesContractPriceData, +) from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData -from sysdata.parquet.parquet_historic_contract_positions import parquetContractPositionData -from sysdata.parquet.parquet_historic_strategy_positions import parquetStrategyPositionData +from sysdata.parquet.parquet_historic_contract_positions import ( + parquetContractPositionData, +) +from sysdata.parquet.parquet_historic_strategy_positions import ( + parquetStrategyPositionData, +) from sysdata.csv.csv_futures_contracts import csvFuturesContractData from sysdata.csv.csv_contract_position_data import csvContractPositionData @@ -60,72 +66,66 @@ def backup_arctic_to_parquet(): - - backup_data = get_data_blob("backup_arctic_to_parquet") - log = backup_data.log - - log.debug("Dumping from arctic, mongo to parquet files") - do = true_if_answer_is_yes("Do futures contract prices?") - if do: - backup_futures_contract_prices_to_parquet(backup_data) - - do = true_if_answer_is_yes("FX?") - if do: - backup_fx_to_parquet(backup_data) - do = true_if_answer_is_yes("Multiple prices?") - if do: - backup_multiple_to_parquet(backup_data) - do = true_if_answer_is_yes("Adjusted prices?") - if do: - backup_adj_to_parquet(backup_data) - do = true_if_answer_is_yes("Strategy positions?") - if do: - backup_strategy_position_data(backup_data) - do = true_if_answer_is_yes("Contract positions?") - if do: - backup_contract_position_data(backup_data) - - do = true_if_answer_is_yes("Capital?") - if do: - backup_capital(backup_data) - do = true_if_answer_is_yes("Time series of spread costs?") - if do: - backup_spreads_to_parquet(backup_data) - do = true_if_answer_is_yes("optimal positions?") - if do: - backup_optimal_positions(backup_data) - - # backup_contract_data(backup_data) - # backup_historical_orders(backup_data) - #backup_roll_state_data(backup_data) + backup_data = get_data_blob("backup_arctic_to_parquet") + log = backup_data.log + + log.debug("Dumping from arctic, mongo to parquet files") + do = true_if_answer_is_yes("Do futures contract prices?") + if do: + backup_futures_contract_prices_to_parquet(backup_data) + + do = true_if_answer_is_yes("FX?") + if do: + backup_fx_to_parquet(backup_data) + do = true_if_answer_is_yes("Multiple prices?") + if do: + backup_multiple_to_parquet(backup_data) + do = true_if_answer_is_yes("Adjusted prices?") + if do: + backup_adj_to_parquet(backup_data) + do = true_if_answer_is_yes("Strategy positions?") + if do: + backup_strategy_position_data(backup_data) + do = true_if_answer_is_yes("Contract positions?") + if do: + backup_contract_position_data(backup_data) + + do = true_if_answer_is_yes("Capital?") + if do: + backup_capital(backup_data) + do = true_if_answer_is_yes("Time series of spread costs?") + if do: + backup_spreads_to_parquet(backup_data) + do = true_if_answer_is_yes("optimal positions?") + if do: + backup_optimal_positions(backup_data) + + # backup_contract_data(backup_data) + # backup_historical_orders(backup_data) + # backup_roll_state_data(backup_data) def get_data_blob(logname): - - data = dataBlob( - log_name=logname, - keep_original_prefix=True - ) + data = dataBlob(log_name=logname, keep_original_prefix=True) data.add_class_list( [ parquetFuturesMultiplePricesData, - #csvBrokerHistoricOrdersData, + # csvBrokerHistoricOrdersData, parquetCapitalData, - #csvContractHistoricOrdersData, + # csvContractHistoricOrdersData, parquetContractPositionData, parquetFuturesAdjustedPricesData, - #csvFuturesContractData, + # csvFuturesContractData, parquetFxPricesData, parquetOptimalPositionData, - #csvRollStateData, - #csvSpreadCostData, + # csvRollStateData, + # csvSpreadCostData, parquetSpreadsForInstrumentData, - #csvStrategyHistoricOrdersData, + # csvStrategyHistoricOrdersData, parquetStrategyPositionData, parquetFuturesContractPriceData, ], - ) data.add_class_list( @@ -158,23 +158,23 @@ def backup_adj_to_parquet(data): for instrument_code in instrument_list: backup_adj_to_parquet_for_instrument(data, instrument_code) + def backup_adj_to_parquet_for_instrument(data: dataBlob, instrument_code: str): arctic_data = data.arctic_futures_adjusted_prices.get_adjusted_prices( instrument_code ) px = data.parquet_futures_adjusted_prices.get_adjusted_prices(instrument_code) - if len(px)>=len(arctic_data): + if len(px) >= len(arctic_data): data.log.warning("Appears to be more parquet data, not doing this") return try: data.parquet_futures_adjusted_prices.add_adjusted_prices( instrument_code, arctic_data, ignore_duplication=True ) - px = data.parquet_futures_adjusted_prices.get_adjusted_prices( - instrument_code - ) + px = data.parquet_futures_adjusted_prices.get_adjusted_prices(instrument_code) data.log.debug( - "Written .parquet backup for adjusted prices %s, %s" % (instrument_code,str(px)) + "Written .parquet backup for adjusted prices %s, %s" + % (instrument_code, str(px)) ) except BaseException: data.log.warning( @@ -189,8 +189,7 @@ def backup_futures_contract_prices_to_parquet(data): ) for instrument_code in instrument_list: backup_futures_contract_prices_for_instrument_to_parquet( - data=data, - instrument_code=instrument_code + data=data, instrument_code=instrument_code ) @@ -211,7 +210,6 @@ def backup_futures_contract_prices_for_instrument_to_parquet( def backup_futures_contract_prices_for_contract_to_parquet( data: dataBlob, futures_contract: futuresContract ): - arctic_data = ( data.arctic_futures_contract_price.get_merged_prices_for_contract_object( futures_contract @@ -222,7 +220,7 @@ def backup_futures_contract_prices_for_contract_to_parquet( futures_contract ) ) - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.warning("More parquet data, not doing") return @@ -237,31 +235,27 @@ def backup_futures_contract_prices_for_contract_to_parquet( ) ) data.log.debug( - "Written backup .csv of prices for %s was %s now %s" % (str(futures_contract), arctic_data, parquet_data) + "Written backup .csv of prices for %s was %s now %s" + % (str(futures_contract), arctic_data, parquet_data) ) for frequency in [DAILY_PRICE_FREQ, HOURLY_FREQ]: - arctic_data = ( - data.arctic_futures_contract_price.get_prices_at_frequency_for_contract_object( - futures_contract, - frequency=frequency - ) + arctic_data = data.arctic_futures_contract_price.get_prices_at_frequency_for_contract_object( + futures_contract, frequency=frequency ) data.parquet_futures_contract_price.write_prices_at_frequency_for_contract_object( futures_contract_object=futures_contract, futures_price_data=arctic_data, frequency=frequency, - ignore_duplication=True + ignore_duplication=True, ) - parquet_data = ( - data.parquet_futures_contract_price.get_prices_at_frequency_for_contract_object( - futures_contract, - frequency=frequency - ) + parquet_data = data.parquet_futures_contract_price.get_prices_at_frequency_for_contract_object( + futures_contract, frequency=frequency ) data.log.debug( - "Written backup .csv of prices at frequency %s for %s was %s now %s" % (str(frequency), str(futures_contract), arctic_data, parquet_data) + "Written backup .csv of prices at frequency %s for %s was %s now %s" + % (str(frequency), str(futures_contract), arctic_data, parquet_data) ) @@ -276,31 +270,29 @@ def backup_multiple_to_parquet_for_instrument(data, instrument_code: str): instrument_code ) parquet_data = data.parquet_futures_multiple_prices.get_multiple_prices( - instrument_code) - if len(parquet_data)>=len(arctic_data): + instrument_code + ) + if len(parquet_data) >= len(arctic_data): data.log.warning("More parquet data, skipping") return data.parquet_futures_multiple_prices.add_multiple_prices( instrument_code, arctic_data, ignore_duplication=True ) - new_data = data.parquet_futures_multiple_prices.get_multiple_prices( - instrument_code) + new_data = data.parquet_futures_multiple_prices.get_multiple_prices(instrument_code) data.log.debug( - "Written .csv backup multiple prices for %s was %s now %s" % (instrument_code, - arctic_data, - new_data) + "Written .csv backup multiple prices for %s was %s now %s" + % (instrument_code, arctic_data, new_data) ) - # fx def backup_fx_to_parquet(data): fx_codes = data.arctic_fx_prices.get_list_of_fxcodes() for fx_code in fx_codes: arctic_data = data.arctic_fx_prices.get_fx_prices(fx_code) parquet_data = data.parquet_fx_prices.get_fx_prices(fx_code) - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.debug("No fx backup needed for %s" % fx_code) else: # Write backup @@ -308,9 +300,10 @@ def backup_fx_to_parquet(data): fx_code, arctic_data, ignore_duplication=True ) parquet_data = data.parquet_fx_prices.get_fx_prices(fx_code) - data.log.debug("Written fx for %s, was %s now %s" % (fx_code, arctic_data, parquet_data)) - - + data.log.debug( + "Written fx for %s, was %s now %s" + % (fx_code, arctic_data, parquet_data) + ) def backup_spreads_to_parquet(data: dataBlob): @@ -323,15 +316,18 @@ def backup_spreads_to_parquet_for_instrument(data: dataBlob, instrument_code: st arctic_data = data.arctic_spreads_for_instrument.get_spreads(instrument_code) parquet_data = data.parquet_spreads_for_instrument.get_spreads(instrument_code) - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.debug("No spreads backup needed for %s" % instrument_code) pass else: - data.parquet_spreads_for_instrument.add_spreads( - instrument_code, arctic_data, ignore_duplication=True - ) - parquet_data = data.parquet_spreads_for_instrument.get_spreads(instrument_code) - data.log.debug("Written .csv backup for spreads %s was %s now %s" % (instrument_code, str(arctic_data), str(parquet_data))) + data.parquet_spreads_for_instrument.add_spreads( + instrument_code, arctic_data, ignore_duplication=True + ) + parquet_data = data.parquet_spreads_for_instrument.get_spreads(instrument_code) + data.log.debug( + "Written .csv backup for spreads %s was %s now %s" + % (instrument_code, str(arctic_data), str(parquet_data)) + ) def backup_contract_position_data(data): @@ -360,7 +356,7 @@ def backup_contract_position_data(data): except missingData: parquet_data = [] - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.debug("Skipping") continue @@ -372,7 +368,8 @@ def backup_contract_position_data(data): ) data.log.debug( - "Backed up %s %s contract position data was %s now %s" % (instrument_code, contract, str(arctic_data), str(parquet_data)) + "Backed up %s %s contract position data was %s now %s" + % (instrument_code, contract, str(arctic_data), str(parquet_data)) ) @@ -399,7 +396,7 @@ def backup_strategy_position_data(data): ) except missingData: parquet_data = [] - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.debug("Skipping") continue @@ -444,24 +441,34 @@ def backup_historical_orders(data): def backup_capital(data): - strategy_list = data.arctic_capital._get_list_of_strategies_with_capital_including_total() + strategy_list = ( + data.arctic_capital._get_list_of_strategies_with_capital_including_total() + ) for strategy_name in strategy_list: - strategy_capital_data=data.arctic_capital.get_capital_pd_df_for_strategy(strategy_name) - parquet_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) - if len(parquet_data)>strategy_capital_data: + strategy_capital_data = data.arctic_capital.get_capital_pd_df_for_strategy( + strategy_name + ) + parquet_data = data.parquet_capital.get_capital_pd_df_for_strategy( + strategy_name + ) + if len(parquet_data) > strategy_capital_data: data.log.warning("More parquet data, skipping") - data.parquet_capital.update_capital_pd_df_for_strategy(strategy_name=strategy_name, updated_capital_df=strategy_capital_data) - written_data = data.parquet_capital.get_capital_pd_df_for_strategy(strategy_name) - print("Wrote capital data for strategy %s, was %s now %s" % (strategy_name, str(strategy_capital_data), str(written_data))) + data.parquet_capital.update_capital_pd_df_for_strategy( + strategy_name=strategy_name, updated_capital_df=strategy_capital_data + ) + written_data = data.parquet_capital.get_capital_pd_df_for_strategy( + strategy_name + ) + print( + "Wrote capital data for strategy %s, was %s now %s" + % (strategy_name, str(strategy_capital_data), str(written_data)) + ) return strategy_capital_data - - def backup_optimal_positions(data): - strategy_instrument_list = ( data.arctic_optimal_position.get_list_of_instrument_strategies_with_optimal_position() ) @@ -479,9 +486,9 @@ def backup_optimal_positions(data): instrument_strategy ) except missingData: - parquet_data=[] + parquet_data = [] - if len(parquet_data)>=len(arctic_data): + if len(parquet_data) >= len(arctic_data): data.log.debug("skipping already written") data.parquet_optimal_position.write_optimal_position_as_df_for_instrument_strategy_without_checking( @@ -491,12 +498,16 @@ def backup_optimal_positions(data): instrument_strategy ) - data.log.debug("Backed up %s optimal position data was %s now %s" % (str(instrument_strategy), str(arctic_data), str(parquet_data))) + data.log.debug( + "Backed up %s optimal position data was %s now %s" + % (str(instrument_strategy), str(arctic_data), str(parquet_data)) + ) def backup_spread_cost_data(data): pass + def backup_roll_state_data(data): instrument_list = data.mongo_roll_state.get_list_of_instruments() roll_state_list = [] diff --git a/syslogdiag/email_via_db_interface.py b/syslogdiag/email_via_db_interface.py index 3deb1886b1..df37ae7708 100644 --- a/syslogdiag/email_via_db_interface.py +++ b/syslogdiag/email_via_db_interface.py @@ -75,7 +75,6 @@ def can_we_send_this_email_now(data, subject, email_is_report=False): def store_and_warn_email(data, body, subject, email_is_report=False): - warning_sent = have_we_sent_warning_email_for_this_subject(data, subject) if not warning_sent: send_warning_email(data, subject) diff --git a/syslogdiag/log_entry.py b/syslogdiag/log_entry.py index 4b9a20c571..96c4a1cc7f 100644 --- a/syslogdiag/log_entry.py +++ b/syslogdiag/log_entry.py @@ -29,7 +29,6 @@ def __init__( attributes: dict = arg_not_supplied, log_id: int = 0, ): - if attributes is arg_not_supplied: attributes = {} diff --git a/syslogdiag/mongo_email_control.py b/syslogdiag/mongo_email_control.py index 97045bb79c..f02a5bd992 100644 --- a/syslogdiag/mongo_email_control.py +++ b/syslogdiag/mongo_email_control.py @@ -26,7 +26,6 @@ class mongoEmailControlData(emailControlData): def __init__(self, mongo_db=None, log=get_logger("mongoEmailControlData")): - super().__init__(log=log) self._mongo_data = mongoDataWithMultipleKeys( EMAIL_CONTROL_COLLECTION, @@ -66,7 +65,6 @@ def _get_time_last_email_of_type_sent_with_this_subject(self, subject, type): return result_as_datetime def record_date_of_email_send(self, subject): - self._record_date_of_email_type_send(subject, type=LAST_EMAIL_SENT) def record_date_of_email_warning_send(self, subject): diff --git a/syslogdiag/pst_logger.py b/syslogdiag/pst_logger.py index 1658aa199a..e11c86ecc8 100644 --- a/syslogdiag/pst_logger.py +++ b/syslogdiag/pst_logger.py @@ -346,7 +346,6 @@ def log(self, *args, **kwargs): pass def log_handle_caller(self, *args, **kwargs): - pass def get_next_log_id(self) -> int: diff --git a/syslogging/server.py b/syslogging/server.py index 0a756bda5f..49bb55cb36 100644 --- a/syslogging/server.py +++ b/syslogging/server.py @@ -54,7 +54,6 @@ def shutdown(self) -> None: def logging_server(): - """ Adapted from: https://code.activestate.com/recipes/577025-loggingwebmonitor-a-central-logging-server-and-mon/ diff --git a/sysobjects/adjusted_prices.py b/sysobjects/adjusted_prices.py index 14be29d7e4..41194d5e24 100644 --- a/sysobjects/adjusted_prices.py +++ b/sysobjects/adjusted_prices.py @@ -175,7 +175,6 @@ def _calc_new_multiple_prices( existing_adjusted_prices: futuresAdjustedPrices, updated_multiple_prices: futuresMultiplePrices, ) -> (futuresMultiplePrices, str): - last_date_in_current_adj = existing_adjusted_prices.index[-1] multiple_prices_as_dict = updated_multiple_prices.as_dict() diff --git a/sysobjects/contract_dates_and_expiries.py b/sysobjects/contract_dates_and_expiries.py index 5da915a995..1e5491ec5e 100644 --- a/sysobjects/contract_dates_and_expiries.py +++ b/sysobjects/contract_dates_and_expiries.py @@ -237,7 +237,6 @@ def date_str_to_year_month(self) -> (int, str): return current_year_int, current_month_str def as_date(self): - tuple_of_dates = self._as_date_tuple() return datetime.datetime(*tuple_of_dates) @@ -368,7 +367,6 @@ def list_of_date_str(self): return list_of_date_str def index_of_sorted_contract_dates(self) -> list: - clist = self.list_of_date_str return sorted(range(len(clist)), key=lambda k: clist[k]) diff --git a/sysobjects/contracts.py b/sysobjects/contracts.py index 3c18bd5873..4ad56d7202 100644 --- a/sysobjects/contracts.py +++ b/sysobjects/contracts.py @@ -241,7 +241,6 @@ def new_contract_with_replaced_instrument_object(self, new_instrument_object): ) def update_expiry_dates_one_at_a_time_with_method(self, method, **kwargs): - as_list_of_individual_contracts = self.as_list_of_individual_contracts() new_expiries = [ method(single_contract, **kwargs) @@ -289,7 +288,6 @@ def list_of_single_contract_dates(self) -> list: def _resolve_args_for_futures_contract( instrument_object, contract_date_object ) -> tuple: - if isinstance(instrument_object, str): instrument_object = futuresInstrument(instrument_object) @@ -372,7 +370,6 @@ def as_dict(self) -> dict: return contract_dict def difference(self, another_contract_list): - self_contract_dates = set(self.list_of_dates()) another_contract_list_dates = set(another_contract_list.list_of_dates()) diff --git a/sysobjects/dict_of_futures_per_contract_prices.py b/sysobjects/dict_of_futures_per_contract_prices.py index 046c4052b1..16e9e09f95 100644 --- a/sysobjects/dict_of_futures_per_contract_prices.py +++ b/sysobjects/dict_of_futures_per_contract_prices.py @@ -37,13 +37,11 @@ def _get_and_set_sorted_contract_date_str(self): return all_contract_date_str_sorted def last_contract_date_str(self): - all_contract_date_str_sorted = self.sorted_contract_date_str() return all_contract_date_str_sorted.final_date_str() def joint_data(self): - joint_data = [ pd.Series(prices, name=contractid) for contractid, prices in self.items() ] diff --git a/sysobjects/dict_of_named_futures_per_contract_prices.py b/sysobjects/dict_of_named_futures_per_contract_prices.py index ad6bf728d3..b4c83acd0e 100644 --- a/sysobjects/dict_of_named_futures_per_contract_prices.py +++ b/sysobjects/dict_of_named_futures_per_contract_prices.py @@ -170,7 +170,6 @@ def _merge_futures_contract_final_prices_with_contract_id( original_data: futuresNamedContractFinalPricesWithContractID, new_data: futuresNamedContractFinalPricesWithContractID, ) -> futuresNamedContractFinalPricesWithContractID: - if len(new_data) == 0: return original_data @@ -199,7 +198,6 @@ def _assert_merge_is_valid( original_data: futuresNamedContractFinalPricesWithContractID, new_data: futuresNamedContractFinalPricesWithContractID, ): - last_contract_in_original_data = original_data.final_contract() try: diff --git a/sysobjects/fills.py b/sysobjects/fills.py index c40401d63f..2ca83422dc 100644 --- a/sysobjects/fills.py +++ b/sysobjects/fills.py @@ -61,7 +61,6 @@ def as_pd_df(self) -> pd.DataFrame: @classmethod def from_position_series_and_prices(cls, positions: pd.Series, price: pd.Series): - list_of_fills = _list_of_fills_from_position_series_and_prices( positions=positions, price=price ) @@ -72,7 +71,6 @@ def from_position_series_and_prices(cls, positions: pd.Series, price: pd.Series) def _list_of_fills_from_position_series_and_prices( positions: pd.Series, price: pd.Series ) -> ListOfFills: - ( trades_without_zeros, prices_aligned_to_trades, diff --git a/sysobjects/instruments.py b/sysobjects/instruments.py index b77e06e684..99cbe91a96 100644 --- a/sysobjects/instruments.py +++ b/sysobjects/instruments.py @@ -80,7 +80,6 @@ def __init__( PerTrade: float = 0.0, Region: str = "", ): - self.Description = Description self.Currency = Currency self.Pointsize = _zero_if_nan(Pointsize) @@ -223,7 +222,6 @@ def as_pd(self) -> pd.Series: def all_instruments_in_asset_class( self, asset_class: str, must_be_in=arg_not_supplied ) -> list: - asset_class_instrument_list = [ instrument for instrument, item_asset_class in self.items() @@ -351,7 +349,6 @@ def calculate_cost_instrument_currency( price: float, include_slippage: bool = True, ) -> float: - value_per_block = price * block_price_multiplier if include_slippage: slippage = self.calculate_slippage_instrument_currency( diff --git a/sysobjects/multiple_prices.py b/sysobjects/multiple_prices.py index d2f1b1ac74..349d2c400d 100644 --- a/sysobjects/multiple_prices.py +++ b/sysobjects/multiple_prices.py @@ -48,7 +48,6 @@ def concat_with_multiple_prices(self, multiple_prices, timedelta_seconds=1): return new_multiple_prices def as_aligned_pd_row(self, time_index: datetime.timedelta) -> pd.DataFrame: - new_dict = { price_name: self.price, carry_name: self.carry, @@ -69,7 +68,6 @@ def as_aligned_pd_row(self, time_index: datetime.timedelta) -> pd.DataFrame: class futuresMultiplePrices(pd.DataFrame): def __init__(self, data): - _check_valid_multiple_price_data(data) super().__init__(data) diff --git a/sysobjects/production/backtest_storage.py b/sysobjects/production/backtest_storage.py index 993798a844..eea13c6b7a 100644 --- a/sysobjects/production/backtest_storage.py +++ b/sysobjects/production/backtest_storage.py @@ -171,7 +171,6 @@ def get_list_of_stages(self): return self.system.stage_names def get_stage(self, stage_name): - return getattr(self.system, stage_name) def get_list_of_methods_for_stage(self, stage_name): diff --git a/sysobjects/production/capital.py b/sysobjects/production/capital.py index 89f9069ced..0d8f238aad 100644 --- a/sysobjects/production/capital.py +++ b/sysobjects/production/capital.py @@ -15,7 +15,6 @@ def __init__( prev_pandl_cum_acc: float, calc_method: str, ): - self._new_broker_account_value = new_broker_account_value self._calc_method = calc_method self._prev_broker_account_value = prev_broker_account_value diff --git a/sysobjects/production/optimal_positions.py b/sysobjects/production/optimal_positions.py index 2eb83d6f26..9dae45f819 100644 --- a/sysobjects/production/optimal_positions.py +++ b/sysobjects/production/optimal_positions.py @@ -192,7 +192,6 @@ def add_optimal_position_entry_row_to_positions_as_df( existing_optimal_positions_as_df: pd.DataFrame, position_entry: simpleOptimalPosition, ) -> pd.DataFrame: - _check_append_positions_okay( existing_optimal_positions_as_df=existing_optimal_positions_as_df, position_entry=position_entry, @@ -279,7 +278,6 @@ def __init__( instrument_strategy: instrumentStrategy, optimal_position_object: simpleOptimalPosition, ): - self.instrument_strategy = instrument_strategy self.optimal_position = optimal_position_object @@ -424,7 +422,6 @@ def as_pd(self) -> pd.DataFrame: def add_positions( self, position_list: listOfInstrumentStrategyPositions ) -> listOfOptimalAndCurrentPositionsAcrossInstrumentStrategies: - list_of_optimal_and_current = [] for opt_pos_object in self: instrument_strategy = opt_pos_object.instrument_strategy diff --git a/sysobjects/production/override.py b/sysobjects/production/override.py index 67f6f16e78..c3a5d40dce 100644 --- a/sysobjects/production/override.py +++ b/sysobjects/production/override.py @@ -187,7 +187,6 @@ def _apply_float_override( def _apply_reduce_only( original_position_no_override: int, proposed_trade: Order ) -> Order: - proposed_trade_value = proposed_trade.trade.as_single_trade_qty_or_error() desired_new_position = original_position_no_override + proposed_trade_value if sign(desired_new_position) != sign(original_position_no_override): diff --git a/sysobjects/production/position_limits.py b/sysobjects/production/position_limits.py index 6559ecb926..b910902c54 100644 --- a/sysobjects/production/position_limits.py +++ b/sysobjects/production/position_limits.py @@ -112,7 +112,6 @@ def apply_position_limit_to_order(self, order: Order) -> Order: def apply_position_limit_to_order( position: int, position_limit: int, order: Order ) -> Order: - ## POSIITON LIMITS CAN ONLY BE APPLIED TO SINGLE LEG TRADES, EG INSTRUMENT ORDERS proposed_trade = order.as_single_trade_qty_or_error() possible_trade = apply_position_limit_to_single_leg_trade( diff --git a/sysobjects/production/positions.py b/sysobjects/production/positions.py index 5637f0edf2..c447ee158a 100644 --- a/sysobjects/production/positions.py +++ b/sysobjects/production/positions.py @@ -48,7 +48,6 @@ def instrument_code(self) -> str: class instrumentStrategyPosition(Position): def __init__(self, position: int, instrument_strategy: instrumentStrategy): - super().__init__(position, instrument_strategy) @property @@ -311,7 +310,6 @@ def _id_column_dict(self) -> dict: def position_object_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ): - result = list( filter( lambda position: position.instrument_strategy == instrument_strategy, diff --git a/sysobjects/production/trade_limits.py b/sysobjects/production/trade_limits.py index da0bfd88d9..f26409f878 100644 --- a/sysobjects/production/trade_limits.py +++ b/sysobjects/production/trade_limits.py @@ -16,7 +16,6 @@ def __init__( trades_since_last_reset: int = 0, last_reset_time: datetime.datetime = arg_not_supplied, ): - self._trade_limit = int(trade_limit) self._period_days = period_days self._timedelta = datetime.timedelta(days=period_days) diff --git a/sysobjects/production/tradeable_object.py b/sysobjects/production/tradeable_object.py index 6437222944..62f30f6b33 100644 --- a/sysobjects/production/tradeable_object.py +++ b/sysobjects/production/tradeable_object.py @@ -162,7 +162,6 @@ def __init__(self, strategy_name: str, instrument_code: str, contract_id): def from_strategy_name_and_contract_object( futuresContractStrategy, strategy_name: str, futures_contract: futuresContract ): - return futuresContractStrategy( strategy_name=strategy_name, contract_id=futures_contract.date_str, diff --git a/sysobjects/production/trading_hours/intersection_of_weekly_and_specific_trading_hours.py b/sysobjects/production/trading_hours/intersection_of_weekly_and_specific_trading_hours.py index ead1b08f3c..ded648012e 100644 --- a/sysobjects/production/trading_hours/intersection_of_weekly_and_specific_trading_hours.py +++ b/sysobjects/production/trading_hours/intersection_of_weekly_and_specific_trading_hours.py @@ -15,7 +15,6 @@ def intersection_of_any_weekly_and_list_of_normal_trading_hours( list_of_trading_hours: listOfTradingHours, weekly_any_trading_hours: weekdayDictOfListOfTradingHoursAnyDay, ) -> listOfTradingHours: - list_of_intersecting_trading_hours = [] for trading_hours in list_of_trading_hours: intersected_trading_hours = intersection_of_any_weekly_and_trading_hours( @@ -33,7 +32,6 @@ def intersection_of_any_weekly_and_trading_hours( trading_hours: tradingHours, weekly_any_trading_hours: weekdayDictOfListOfTradingHoursAnyDay, ) -> listOfTradingHours: - trading_hours_open_weekday = trading_hours.opening_time.weekday() trading_hours_close_weekday = trading_hours.closing_time.weekday() @@ -57,7 +55,6 @@ def intersection_of_any_weekly_and_trading_hours_spanning_days( trading_hours: tradingHours, weekly_any_trading_hours: weekdayDictOfListOfTradingHoursAnyDay, ) -> listOfTradingHours: - list_of_split_hours = split_trading_hours_across_two_weekdays(trading_hours) list_of_trading_hours = [] for one_day_trading_hours in list_of_split_hours: @@ -74,7 +71,6 @@ def intersection_of_any_weekly_and_trading_hours_spanning_days( def intersect_trading_hours_with_hours_for_weekday( trading_hours: tradingHours, trading_hours_for_weekday: listOfTradingHoursAnyDay ) -> listOfTradingHours: - ## at this point the open and close date will be the same opening_date = trading_hours.opening_time.date() list_of_weekday_trading_hours_with_this_date = trading_hours_for_weekday.add_date( diff --git a/sysobjects/production/trading_hours/weekly_trading_hours_any_day.py b/sysobjects/production/trading_hours/weekly_trading_hours_any_day.py index fe0e553262..f224fa5943 100644 --- a/sysobjects/production/trading_hours/weekly_trading_hours_any_day.py +++ b/sysobjects/production/trading_hours/weekly_trading_hours_any_day.py @@ -16,7 +16,6 @@ def intersect( self, weekday_dict_of_list_of_open_times: "weekdayDictOfListOfTradingHoursAnyDay", ) -> "weekdayDictOfListOfTradingHoursAnyDay": - return intersection_weekday_dict_of_list_of_trading_hours_any_day( self, weekday_dict_of_list_of_open_times ) @@ -51,7 +50,6 @@ def intersection_weekday_dict_of_list_of_trading_hours_any_day( first_dict: weekdayDictOfListOfTradingHoursAnyDay, second_dict: weekdayDictOfListOfTradingHoursAnyDay, ) -> weekdayDictOfListOfTradingHoursAnyDay: - new_dict = dict( [ (weekday, first_dict[weekday].intersect(second_dict[weekday])) diff --git a/sysobjects/roll_calendars.py b/sysobjects/roll_calendars.py index 704980511b..264522e894 100644 --- a/sysobjects/roll_calendars.py +++ b/sysobjects/roll_calendars.py @@ -122,7 +122,6 @@ def _check_row_of_row_calendar( calendar_row: pd.Series, dict_of_futures_contract_prices: dictFuturesContractFinalPrices, ) -> bool: - current_contract = str(calendar_row.current_contract) next_contract = str(calendar_row.next_contract) carry_contract = str(calendar_row.carry_contract) diff --git a/sysobjects/roll_parameters_with_price_data.py b/sysobjects/roll_parameters_with_price_data.py index 940d5b3deb..cb509a715a 100644 --- a/sysobjects/roll_parameters_with_price_data.py +++ b/sysobjects/roll_parameters_with_price_data.py @@ -248,7 +248,6 @@ def _find_earliest_held_contract_with_data( roll_parameters_object: rollParameters, price_dict: dictFuturesContractFinalPrices, ) -> contractWithRollParametersAndPrices: - try_contract = _initial_contract_to_try_with( list_of_contract_dates, roll_parameters_object, price_dict ) @@ -273,7 +272,6 @@ def _initial_contract_to_try_with( roll_parameters_object: rollParameters, price_dict: dictFuturesContractFinalPrices, ) -> contractWithRollParametersAndPrices: - plausible_earliest_contract_date = list_of_contract_dates[0] plausible_earliest_contract = contractDateWithRollParameters( contractDate( @@ -294,7 +292,6 @@ def _check_valid_contract( try_contract: contractWithRollParametersAndPrices, list_of_contract_dates: listOfContractDateStr, ) -> bool: - if try_contract.date_str in list_of_contract_dates: # possible candidate, let's check carry try: diff --git a/sysobjects/rolls.py b/sysobjects/rolls.py index 423137264c..4fd17f161f 100644 --- a/sysobjects/rolls.py +++ b/sysobjects/rolls.py @@ -17,7 +17,6 @@ class rollCycle(object): """ def __init__(self, cyclestring: str): - assert isinstance(cyclestring, str) self._cyclestring = "".join(sorted(cyclestring)) @@ -260,13 +259,11 @@ def global_rollcycle(self): @classmethod def create_from_dict(rollData, roll_data_dict: dict): - futures_instrument_roll_data = rollData(**roll_data_dict) return futures_instrument_roll_data def as_dict(self) -> dict: - return dict( hold_rollcycle=self.hold_rollcycle.cyclestring, priced_rollcycle=self.priced_rollcycle.cyclestring, @@ -404,7 +401,6 @@ def _valid_date_in_hold_rollcycle(self) -> bool: return self._valid_date_in_named_rollcycle("hold_rollcycle") def _valid_date_in_named_rollcycle(self, rollcycle_name: str) -> bool: - relevant_rollcycle = getattr(self.roll_parameters, rollcycle_name) current_month = self.contract_date.letter_month() diff --git a/sysobjects/spot_fx_prices.py b/sysobjects/spot_fx_prices.py index 8fae9e7541..1a748934e5 100644 --- a/sysobjects/spot_fx_prices.py +++ b/sysobjects/spot_fx_prices.py @@ -15,7 +15,6 @@ class fxPrices(pd.Series): """ def __init__(self, data): - super().__init__(data) data.index.name = "index" data.name = "" diff --git a/sysproduction/backup_db_to_csv.py b/sysproduction/backup_db_to_csv.py index fdbf29947a..cc13f8c13d 100644 --- a/sysproduction/backup_db_to_csv.py +++ b/sysproduction/backup_db_to_csv.py @@ -33,6 +33,7 @@ from sysproduction.data.production_data_objects import * + def backup_db_to_csv(): data = dataBlob(log_name="backup_db_to_csv") backup_object = backupDbToCsv(data) @@ -75,7 +76,6 @@ def backup_db_to_csv(self): def get_data_and_create_csv_directories(logname): - csv_dump_dir = get_csv_dump_dir() class_paths = dict( @@ -102,9 +102,7 @@ def get_data_and_create_csv_directories(logname): if not os.path.exists(dir_name): os.makedirs(dir_name) - data = dataBlob( - csv_data_paths=class_paths, log_name=logname - ) + data = dataBlob(csv_data_paths=class_paths, log_name=logname) data.add_class_list( [ @@ -123,8 +121,8 @@ def get_data_and_create_csv_directories(logname): csvSpreadsForInstrumentData, csvStrategyHistoricOrdersData, csvStrategyPositionData, - ] - , use_prefix="csv" + ], + use_prefix="csv", ) data.add_class_list( @@ -143,10 +141,9 @@ def get_data_and_create_csv_directories(logname): get_class_for_data_type(FUTURES_CONTRACT_DATA), get_class_for_data_type(OPTIMAL_POSITION_DATA), get_class_for_data_type(ROLL_STATE_DATA), - get_class_for_data_type(SPREAD_DATA) - + get_class_for_data_type(SPREAD_DATA), ], - use_prefix="db" + use_prefix="db", ) return data @@ -194,10 +191,8 @@ def backup_futures_contract_prices_for_contract_to_csv( return None - db_data = ( - data.db_futures_contract_price.get_merged_prices_for_contract_object( - futures_contract - ) + db_data = data.db_futures_contract_price.get_merged_prices_for_contract_object( + futures_contract ) csv_data = data.csv_futures_contract_price.get_merged_prices_for_contract_object( @@ -250,9 +245,7 @@ def backup_multiple_to_csv(data): def backup_multiple_to_csv_for_instrument(data, instrument_code: str): - db_data = data.db_futures_multiple_prices.get_multiple_prices( - instrument_code - ) + db_data = data.db_futures_multiple_prices.get_multiple_prices(instrument_code) csv_data = data.csv_futures_multiple_prices.get_multiple_prices(instrument_code) if check_df_equals(db_data, csv_data): @@ -279,9 +272,7 @@ def backup_adj_to_csv(data): def backup_adj_to_csv_for_instrument(data: dataBlob, instrument_code: str): - db_data = data.db_futures_adjusted_prices.get_adjusted_prices( - instrument_code - ) + db_data = data.db_futures_adjusted_prices.get_adjusted_prices(instrument_code) csv_data = data.csv_futures_adjusted_prices.get_adjusted_prices(instrument_code) if check_ts_equals(db_data, csv_data): @@ -427,7 +418,6 @@ def get_dict_of_strategy_capital(data: dataBlob) -> dict: def add_total_capital_to_strategy_capital_dict_return_df( data: dataBlob, capital_data: dict ) -> pd.DataFrame: - strategy_capital_as_df = pd.concat(capital_data, axis=1) total_capital = data.db_capital.get_df_of_all_global_capital() capital_data = pd.concat([strategy_capital_as_df, total_capital], axis=1) @@ -438,7 +428,6 @@ def add_total_capital_to_strategy_capital_dict_return_df( def backup_optimal_positions(data): - strategy_instrument_list = ( data.db_optimal_position.get_list_of_instrument_strategies_with_optimal_position() ) diff --git a/sysproduction/data/backtest.py b/sysproduction/data/backtest.py index 0c0daece46..692469492b 100644 --- a/sysproduction/data/backtest.py +++ b/sysproduction/data/backtest.py @@ -273,7 +273,6 @@ def get_directory_store_backtests(): def pickle_state(data, system, backtest_filename): - try: system.cache.pickle(backtest_filename) data.log.debug("Pickled backtest state to %s" % backtest_filename) diff --git a/sysproduction/data/broker.py b/sysproduction/data/broker.py index b4a49af3f3..c6b1607111 100644 --- a/sysproduction/data/broker.py +++ b/sysproduction/data/broker.py @@ -50,7 +50,6 @@ def __init__(self, data: dataBlob = arg_not_supplied): self._diag_controls = diagControlProcess() def _add_required_classes_to_data(self, data) -> dataBlob: - # Add a list of broker specific classes that will be aliased as self.data.broker_fx_prices, # self.data.broker_futures_contract_price ... and so on @@ -137,7 +136,6 @@ def get_cleaned_prices_at_frequency_for_contract_object( frequency: Frequency, cleaning_config=arg_not_supplied, ) -> futuresContractPrices: - broker_prices_raw = self.get_prices_at_frequency_for_contract_object( contract_object=contract_object, frequency=frequency ) @@ -155,7 +153,6 @@ def get_cleaned_prices_at_frequency_for_contract_object( def get_prices_at_frequency_for_potentially_expired_contract_object( self, contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - return self.broker_futures_contract_price_data.get_prices_at_frequency_for_potentially_expired_contract_object( contract=contract_object, freq=frequency ) @@ -163,7 +160,6 @@ def get_prices_at_frequency_for_potentially_expired_contract_object( def get_prices_at_frequency_for_contract_object( self, contract_object: futuresContract, frequency: Frequency ) -> futuresContractPrices: - return self.broker_futures_contract_price_data.get_prices_at_frequency_for_contract_object( contract_object, frequency, return_empty=False ) @@ -171,7 +167,6 @@ def get_prices_at_frequency_for_contract_object( def get_recent_bid_ask_tick_data_for_contract_object( self, contract: futuresContract ) -> dataFrameOfRecentTicks: - ticker = self.get_ticker_object_for_contract(contract) ticker_df = get_df_of_ticks_from_ticker_object(ticker) self.cancel_market_data_for_contract(contract) @@ -198,7 +193,6 @@ def get_brokers_instrument_with_metadata( def less_than_N_hours_of_trading_left_for_contract( self, contract: futuresContract, N_hours: float = 1.0 ) -> bool: - hours_left_before_process_finishes = ( self.diag_controls.how_long_in_hours_before_trading_process_finishes() ) @@ -328,7 +322,6 @@ def get_market_conditions_for_contract_order_by_leg( contract_order.futures_contract.as_list_of_individual_contracts() ) for contract, qty in zip(list_of_contracts, list_of_trade_qty): - market_conditions_this_contract = ( self.check_market_conditions_for_single_legged_contract_and_qty( contract, qty diff --git a/sysproduction/data/capital.py b/sysproduction/data/capital.py index 1fd4075a2a..06c52544e9 100644 --- a/sysproduction/data/capital.py +++ b/sysproduction/data/capital.py @@ -11,7 +11,10 @@ from sysdata.data_blob import dataBlob from sysproduction.data.generic_production_data import productionDataLayerGeneric -from sysproduction.data.production_data_objects import get_class_for_data_type, CAPITAL_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + CAPITAL_DATA, +) from systems.accounts.from_returns import account_curve_from_returns @@ -57,7 +60,6 @@ def check_for_total_capital_data(self) -> bool: def update_and_return_total_capital_with_new_broker_account_value( self, total_account_value_in_base_currency: float, check_limit: float = 0.1 ) -> float: - result = self.total_capital_calculator.update_and_return_total_capital_with_new_broker_account_value( total_account_value_in_base_currency, check_limit=check_limit ) @@ -83,7 +85,6 @@ def create_initial_capital( acc_pandl: float = arg_not_supplied, are_you_really_sure: bool = False, ): - self.total_capital_calculator.create_initial_capital( broker_account_value=broker_account_value, total_capital=total_capital, @@ -175,7 +176,6 @@ def update_capital_value_for_strategy( new_capital_value: float, date: datetime.datetime = arg_not_supplied, ): - self.db_capital_data.update_capital_value_for_strategy( strategy_name, new_capital_value, date=date ) diff --git a/sysproduction/data/contracts.py b/sysproduction/data/contracts.py index a08a79594f..d408384ef0 100644 --- a/sysproduction/data/contracts.py +++ b/sysproduction/data/contracts.py @@ -17,7 +17,13 @@ from sysproduction.data.prices import get_valid_instrument_code_from_user, diagPrices from sysproduction.data.generic_production_data import productionDataLayerGeneric -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, ROLL_PARAMETERS_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + FUTURES_CONTRACT_PRICE_DATA, + ROLL_PARAMETERS_DATA, + FUTURES_MULTIPLE_PRICE_DATA, + FUTURES_CONTRACT_DATA, +) from sysdata.data_blob import dataBlob missing_expiry = datetime.datetime(1900, 1, 1) @@ -30,7 +36,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), get_class_for_data_type(ROLL_PARAMETERS_DATA), get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), - get_class_for_data_type(FUTURES_CONTRACT_DATA) + get_class_for_data_type(FUTURES_CONTRACT_DATA), ] ) @@ -142,7 +148,6 @@ def get_all_sampled_contracts(self, instrument_code: str) -> listOfFuturesContra return sampled_contracts def get_labelled_dict_of_current_contracts(self, instrument_code: str) -> dict: - current_contracts = self.get_current_contract_dict(instrument_code) list_of_date_str, labelled_contracts = label_up_current_contracts( @@ -190,7 +195,6 @@ def get_contract_from_db(self, contract: futuresContract) -> futuresContract: def get_contract_from_db_given_code_and_id( self, instrument_code: str, contract_id: str ) -> futuresContract: - contract_object = self.db_contract_data.get_contract_object( instrument_code=instrument_code, contract_id=contract_id ) @@ -244,7 +248,6 @@ def when_to_roll_priced_contract(self, instrument_code: str) -> datetime.datetim def get_contract_date_object_with_roll_parameters( self, instrument_code: str, contract_date_str: str ) -> contractDateWithRollParameters: - roll_parameters = self.get_roll_parameters(instrument_code) contract_date = self._get_contract_date_object( instrument_code, contract_date_str @@ -279,7 +282,6 @@ def get_valid_contract_object_from_user( instrument_code: str = None, only_include_priced_contracts: bool = False, ) -> futuresContract: - ( instrument_code, contract_date_str, @@ -297,7 +299,6 @@ def get_valid_instrument_code_and_contractid_from_user( instrument_code: str = None, only_include_priced_contracts: bool = False, ) -> (str, str): - diag_contracts = dataContracts(data) invalid_input = True @@ -339,7 +340,6 @@ def get_valid_instrument_code_and_contractid_from_user( def get_dates_to_choose_from( data: dataBlob, instrument_code: str, only_priced_contracts: bool = False ) -> listOfContractDateStr: - diag_contracts = dataContracts(data) diag_prices = diagPrices(data) if only_priced_contracts: @@ -381,7 +381,6 @@ def label_up_contracts_with_date_list( contract_names = [] for contract in contract_date_list: - if contract == price_contract_date: suffix = PRICE_SUFFIX elif contract == forward_contract_date: diff --git a/sysproduction/data/control_process.py b/sysproduction/data/control_process.py index ea7c66e979..f50927c502 100644 --- a/sysproduction/data/control_process.py +++ b/sysproduction/data/control_process.py @@ -13,7 +13,11 @@ from sysproduction.data.generic_production_data import productionDataLayerGeneric -from sysproduction.data.production_data_objects import get_class_for_data_type, PROCESS_CONTROL_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + PROCESS_CONTROL_DATA, +) + DEFAULT_METHOD_FREQUENCY = 60 DEFAULT_MAX_EXECUTIONS = 1 DEFAULT_START_TIME_STRING = "00:01" @@ -290,7 +294,6 @@ def get_start_time(self, process_name: str) -> datetime.time: return result def how_long_in_hours_before_trading_process_finishes(self) -> float: - now_datetime = datetime.datetime.now() now_date = now_datetime.date() diff --git a/sysproduction/data/controls.py b/sysproduction/data/controls.py index 7a1e64726e..7276d3933f 100644 --- a/sysproduction/data/controls.py +++ b/sysproduction/data/controls.py @@ -132,7 +132,6 @@ def db_trade_limit_data(self) -> tradeLimitData: def what_trade_is_possible_for_strategy_instrument( self, instrument_strategy: instrumentStrategy, proposed_trade: tradeQuantity ) -> int: - proposed_trade_qty = proposed_trade.total_abs_qty() possible_trade = self.what_trade_qty_possible_for_instrument_strategy( instrument_strategy=instrument_strategy, @@ -144,7 +143,6 @@ def what_trade_is_possible_for_strategy_instrument( def what_trade_qty_possible_for_instrument_strategy( self, instrument_strategy: instrumentStrategy, proposed_trade_qty: int ) -> int: - possible_trade = ( self.db_trade_limit_data.what_trade_is_possible_for_instrument_strategy( instrument_strategy, proposed_trade_qty @@ -156,7 +154,6 @@ def what_trade_qty_possible_for_instrument_strategy( def what_trade_qty_possible_for_instrument_code( self, instrument_code, proposed_trade_qty: int ) -> int: - possible_trade = self.db_trade_limit_data.what_trade_is_possible_for_instrument( instrument_code=instrument_code, proposed_trade=proposed_trade_qty ) @@ -267,7 +264,6 @@ def get_dict_of_all_overrides_in_db_with_reasons(self) -> dict: def get_cumulative_override_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> Override: - cumulative_override_from_db = ( self.get_cumulative_override_for_instrument_strategy_from_db( instrument_strategy @@ -529,7 +525,6 @@ def db_temporary_close_data(self) -> temporaryCloseData: return self.data.db_temporary_close def apply_position_limit_to_order(self, order: instrumentOrder) -> instrumentOrder: - list_of_orders = self._get_list_of_orders_after_position_limits_applied(order) ## use instrument position as it includes everything @@ -548,7 +543,6 @@ def apply_position_limit_to_order(self, order: instrumentOrder) -> instrumentOrd def _get_list_of_orders_after_position_limits_applied( self, order: instrumentOrder ) -> listOfOrders: - instrument_strategy = order.instrument_strategy instrument_code = instrument_strategy.instrument_code @@ -573,7 +567,6 @@ def _get_list_of_orders_after_position_limits_applied( def _apply_instrument_strategy_position_limit_to_order( self, instrument_strategy: instrumentStrategy, order: instrumentOrder ) -> instrumentOrder: - position_and_limit = self._get_limit_and_position_for_instrument_strategy( instrument_strategy ) @@ -587,7 +580,6 @@ def _apply_instrument_strategy_position_limit_to_order( def get_maximum_position_contracts_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> int: - ## FIXME: THIS WON'T WORK IF THERE ARE MULTIPLE STRATEGIES TRADING AN INSTRUMENT limit_for_instrument = self._get_position_limit_object_for_instrument( @@ -639,7 +631,6 @@ def _get_current_position_for_instrument_strategy( def _apply_instrument_position_limit_to_order( self, instrument_code: str, order: instrumentOrder ) -> instrumentOrder: - position_and_limit = self._get_limit_and_position_for_instrument( instrument_code ) @@ -808,7 +799,6 @@ def reset_position_limit_for_instrument_to_original_value(self, instrument_code) def set_position_limit_for_instrument_strategy( self, instrument_strategy: instrumentStrategy, new_position_limit: int ): - self.db_position_limit_data.set_position_limit_for_instrument_strategy( instrument_strategy, new_position_limit ) @@ -816,7 +806,6 @@ def set_position_limit_for_instrument_strategy( def set_abs_position_limit_for_instrument( self, instrument_code: str, new_position_limit: int ): - self.db_position_limit_data.set_position_limit_for_instrument( instrument_code, new_position_limit ) @@ -824,13 +813,11 @@ def set_abs_position_limit_for_instrument( def delete_position_limit_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ): - self.db_position_limit_data.delete_position_limit_for_instrument_strategy( instrument_strategy ) def delete_position_limit_for_instrument(self, instrument_code: str): - self.db_position_limit_data.delete_position_limit_for_instrument( instrument_code ) diff --git a/sysproduction/data/currency_data.py b/sysproduction/data/currency_data.py index 754d998242..70a199e902 100644 --- a/sysproduction/data/currency_data.py +++ b/sysproduction/data/currency_data.py @@ -9,6 +9,7 @@ from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.production_data_objects import get_class_for_data_type, FX_DATA + class dataCurrency(productionDataLayerGeneric): def _add_required_classes_to_data(self, data: dataBlob) -> dataBlob: data.add_class_object(get_class_for_data_type(FX_DATA)) diff --git a/sysproduction/data/generic_production_data.py b/sysproduction/data/generic_production_data.py index 67cc084857..57e15ac701 100644 --- a/sysproduction/data/generic_production_data.py +++ b/sysproduction/data/generic_production_data.py @@ -25,5 +25,4 @@ def log(self): return self.data.log def _add_required_classes_to_data(self, data) -> dataBlob: - return data diff --git a/sysproduction/data/instruments.py b/sysproduction/data/instruments.py index 824776dfa2..50a5f66f55 100644 --- a/sysproduction/data/instruments.py +++ b/sysproduction/data/instruments.py @@ -1,4 +1,3 @@ - from sysdata.data_blob import dataBlob from sysdata.futures.instruments import futuresInstrumentData from sysdata.futures.spread_costs import spreadCostData @@ -9,7 +8,12 @@ from sysproduction.data.currency_data import dataCurrency from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.config import get_list_of_stale_instruments -from sysproduction.data.production_data_objects import STORED_SPREAD_DATA, get_class_for_data_type, FUTURES_INSTRUMENT_DATA +from sysproduction.data.production_data_objects import ( + STORED_SPREAD_DATA, + get_class_for_data_type, + FUTURES_INSTRUMENT_DATA, +) + class updateSpreadCosts(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: @@ -33,9 +37,12 @@ def db_spread_cost_data(self) -> spreadCostData: class diagInstruments(productionDataLayerGeneric): def _add_required_classes_to_data(self, data: dataBlob) -> dataBlob: - data.add_class_list([ - get_class_for_data_type(FUTURES_INSTRUMENT_DATA), - get_class_for_data_type(STORED_SPREAD_DATA)]) + data.add_class_list( + [ + get_class_for_data_type(FUTURES_INSTRUMENT_DATA), + get_class_for_data_type(STORED_SPREAD_DATA), + ] + ) return data diff --git a/sysproduction/data/optimal_positions.py b/sysproduction/data/optimal_positions.py index c11ec2bfbc..cba1967751 100644 --- a/sysproduction/data/optimal_positions.py +++ b/sysproduction/data/optimal_positions.py @@ -17,7 +17,11 @@ get_list_of_stale_instruments, get_list_of_stale_strategies, ) -from sysproduction.data.production_data_objects import get_class_for_data_type, OPTIMAL_POSITION_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + OPTIMAL_POSITION_DATA, +) + class dataOptimalPositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: @@ -28,7 +32,6 @@ def _add_required_classes_to_data(self, data) -> dataBlob: def get_list_of_current_optimal_positions_for_strategy_name( self, strategy_name: str ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - all_optimal_positions = self.get_list_of_optimal_positions() optimal_positions_for_strategy = all_optimal_positions.filter_by_strategy( strategy_name @@ -51,7 +54,6 @@ def get_list_of_instruments_for_strategy_with_optimal_position( return list_of_instruments def get_list_of_strategies_with_optimal_position(self) -> list: - list_of_strategies = ( self.db_optimal_position_data.list_of_strategies_with_optimal_position() ) @@ -62,7 +64,6 @@ def get_list_of_strategies_with_optimal_position(self) -> list: def get_current_optimal_position_for_instrument_strategy( self, instrument_strategy: instrumentStrategy, raw_positions=False ) -> baseOptimalPosition: - if raw_positions: use_instrument_strategy = instrument_strategy_with_raw_tag( instrument_strategy @@ -79,7 +80,6 @@ def get_current_optimal_position_for_instrument_strategy( def get_optimal_position_as_df_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> pd.DataFrame: - df_object = self.db_optimal_position_data.get_optimal_position_as_df_for_instrument_strategy( instrument_strategy ) @@ -106,7 +106,6 @@ def update_optimal_position_for_instrument_strategy( def get_list_of_optimal_positions( self, ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - ## drop stale markets list_of_optimal_positions_and_instrument_strategies = ( self.db_optimal_position_data.get_list_of_optimal_positions() @@ -140,7 +139,6 @@ def get_list_of_optimal_position_breaks(self) -> list: def get_list_of_optimal_and_current_positions( self, ) -> listOfOptimalAndCurrentPositionsAcrossInstrumentStrategies: - optimal_positions = self.get_list_of_optimal_positions() position_data = diagPositions(self.data) @@ -180,7 +178,6 @@ def is_raw_strategy(strategy_name: str) -> bool: def remove_raw_from_list_of_optimal_positions_and_instrument_strategies( list_of_optimal_positions_and_instrument_strategies: listOfOptimalPositionsAcrossInstrumentStrategies, ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - list_of_optimal_positions_and_instrument_strategies = [ optimal_position_and_instrument_strategy for optimal_position_and_instrument_strategy in list_of_optimal_positions_and_instrument_strategies @@ -197,7 +194,6 @@ def remove_raw_from_list_of_optimal_positions_and_instrument_strategies( def is_not_raw_optimal_position_and_instrument_strategy( optimal_position_and_instrument_strategy: instrumentStrategyAndOptimalPosition, ) -> bool: - return is_not_raw_instrument_strategy( optimal_position_and_instrument_strategy.instrument_strategy ) @@ -227,7 +223,6 @@ def strategy_name_with_raw_tag(strategy_name: str) -> str: def remove_stale_strategies_and_instruments_from_list_of_optimal_positions_and_instrument_strategies( list_of_optimal_positions_and_instrument_strategies: listOfOptimalPositionsAcrossInstrumentStrategies, ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - filtered_list = remove_stale_strategies_from_list_of_optimal_positions_and_instrument_strategies( list_of_optimal_positions_and_instrument_strategies=list_of_optimal_positions_and_instrument_strategies, ) @@ -242,7 +237,6 @@ def remove_stale_strategies_and_instruments_from_list_of_optimal_positions_and_i def remove_stale_strategies_from_list_of_optimal_positions_and_instrument_strategies( list_of_optimal_positions_and_instrument_strategies: listOfOptimalPositionsAcrossInstrumentStrategies, ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - list_of_stale_strategies = get_list_of_stale_strategies() new_list = ( list_of_optimal_positions_and_instrument_strategies.filter_removing_strategies( @@ -256,7 +250,6 @@ def remove_stale_strategies_from_list_of_optimal_positions_and_instrument_strate def remove_stale_instruments_from_list_of_optimal_positions_and_instrument_strategies( list_of_optimal_positions_and_instrument_strategies: listOfOptimalPositionsAcrossInstrumentStrategies, ) -> listOfOptimalPositionsAcrossInstrumentStrategies: - list_of_stale_instruments = get_list_of_stale_instruments() new_list = ( list_of_optimal_positions_and_instrument_strategies.filter_removing_instruments( diff --git a/sysproduction/data/orders.py b/sysproduction/data/orders.py index 634e130c61..d7eb4cc95e 100644 --- a/sysproduction/data/orders.py +++ b/sysproduction/data/orders.py @@ -24,7 +24,15 @@ from sysobjects.production.tradeable_object import instrumentStrategy, futuresContract -from sysproduction.data.production_data_objects import get_class_for_data_type, INSTRUMENT_ORDER_STACK_DATA, CONTRACT_ORDER_STACK_DATA, BROKER_HISTORIC_ORDERS_DATA, STRATEGY_HISTORIC_ORDERS_DATA, CONTRACT_HISTORIC_ORDERS_DATA, BROKER_ORDER_STACK_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + INSTRUMENT_ORDER_STACK_DATA, + CONTRACT_ORDER_STACK_DATA, + BROKER_HISTORIC_ORDERS_DATA, + STRATEGY_HISTORIC_ORDERS_DATA, + CONTRACT_HISTORIC_ORDERS_DATA, + BROKER_ORDER_STACK_DATA, +) class dataOrders(object): @@ -39,7 +47,7 @@ def __init__(self, data: dataBlob = arg_not_supplied): get_class_for_data_type(BROKER_ORDER_STACK_DATA), get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), - get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA) + get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), ] ) self._data = data @@ -112,7 +120,6 @@ def get_historic_broker_order_ids_in_date_range( def get_historic_contract_order_ids_in_date_range( self, period_start: datetime.datetime, period_end: datetime.datetime ) -> list: - order_id_list = ( self.db_contract_historic_orders_data.get_list_of_order_ids_in_date_range( period_start, period_end @@ -124,7 +131,6 @@ def get_historic_contract_order_ids_in_date_range( def get_historic_instrument_order_ids_in_date_range( self, period_start: datetime.datetime, period_end: datetime.datetime ) -> list: - order_id_list = ( self.db_strategy_historic_orders_data.get_list_of_order_ids_in_date_range( period_start, period_end @@ -174,7 +180,6 @@ def get_fills_history_for_instrument_strategy( def get_historic_broker_order_from_order_id_with_execution_data( self, order_id: int ) -> brokerOrderWithParentInformation: - order = self.get_historic_broker_order_from_order_id(order_id) contract_order = self.get_parent_contract_order_for_historic_broker_order_id( diff --git a/sysproduction/data/positions.py b/sysproduction/data/positions.py index 7b970f9497..c72e6c6dca 100644 --- a/sysproduction/data/positions.py +++ b/sysproduction/data/positions.py @@ -37,7 +37,13 @@ from sysproduction.data.generic_production_data import productionDataLayerGeneric from sysproduction.data.contracts import dataContracts -from sysproduction.data.production_data_objects import get_class_for_data_type, ROLL_STATE_DATA, STRATEGY_POSITION_DATA, CONTRACT_POSITION_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + ROLL_STATE_DATA, + STRATEGY_POSITION_DATA, + CONTRACT_POSITION_DATA, +) + class diagPositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: @@ -45,8 +51,8 @@ def _add_required_classes_to_data(self, data) -> dataBlob: [ get_class_for_data_type(ROLL_STATE_DATA), get_class_for_data_type(STRATEGY_POSITION_DATA), - get_class_for_data_type(CONTRACT_POSITION_DATA) - ] + get_class_for_data_type(CONTRACT_POSITION_DATA), + ] ) return data @@ -163,7 +169,6 @@ def get_dict_of_actual_positions_for_strategy( return actual_positions def get_position_series_for_contract(self, contract: futuresContract) -> pd.Series: - df_object = ( self.db_contract_position_data.get_position_as_series_for_contract_object( contract @@ -175,7 +180,6 @@ def get_position_series_for_contract(self, contract: futuresContract) -> pd.Seri def get_position_series_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> pd.Series: - position_series = self.db_strategy_position_data.get_position_as_series_for_instrument_strategy_object( instrument_strategy ) @@ -185,7 +189,6 @@ def get_position_series_for_instrument_strategy( def get_positions_for_instrument_and_contract_list( self, instrument_code: str, list_of_contract_date_str: list ) -> list: - list_of_contracts = [ futuresContract(instrument_code, contract_date_str) for contract_date_str in list_of_contract_date_str @@ -198,7 +201,6 @@ def get_positions_for_instrument_and_contract_list( return list_of_positions def get_position_for_contract(self, contract: futuresContract) -> float: - position = ( self.db_contract_position_data.get_current_position_for_contract_object( contract @@ -210,7 +212,6 @@ def get_position_for_contract(self, contract: futuresContract) -> float: def get_current_position_for_instrument_strategy( self, instrument_strategy: instrumentStrategy ) -> int: - position = self.db_strategy_position_data.get_current_position_for_instrument_strategy_object( instrument_strategy ) @@ -220,7 +221,6 @@ def get_current_position_for_instrument_strategy( def get_list_of_instruments_for_strategy_with_position( self, strategy_name: str, ignore_zero_positions=True ) -> List[str]: - instrument_list = self.db_strategy_position_data.get_list_of_instruments_for_strategy_with_position( strategy_name, ignore_zero_positions=ignore_zero_positions ) @@ -276,7 +276,6 @@ def get_all_current_contract_positions(self) -> listOfContractPositions: def update_expiries_for_position_list( self, original_position_list: listOfContractPositions ) -> listOfContractPositions: - new_position_list = listOfContractPositions() for position_entry in original_position_list: new_position_entry = self.update_expiry_for_single_position(position_entry) @@ -367,7 +366,6 @@ def get_list_of_contracts_with_any_contract_position_for_instrument_in_date_rang start_date: datetime.datetime, end_date: datetime.datetime = arg_not_supplied, ) -> list: - if end_date is arg_not_supplied: end_date = datetime.datetime.now() @@ -390,10 +388,11 @@ def get_position_in_priced_contract_for_instrument( class updatePositions(productionDataLayerGeneric): def _add_required_classes_to_data(self, data) -> dataBlob: - data.add_class_list([ - get_class_for_data_type(ROLL_STATE_DATA), - get_class_for_data_type(STRATEGY_POSITION_DATA), - get_class_for_data_type(CONTRACT_POSITION_DATA) + data.add_class_list( + [ + get_class_for_data_type(ROLL_STATE_DATA), + get_class_for_data_type(STRATEGY_POSITION_DATA), + get_class_for_data_type(CONTRACT_POSITION_DATA), ] ) return data @@ -516,7 +515,6 @@ def update_contract_position_table_with_contract_order( def _update_positions_for_individual_contract_leg( self, contract: futuresContract, trade_done: int, time_date: datetime.datetime ): - current_position = self.diag_positions.get_position_for_contract(contract) new_position = current_position + trade_done diff --git a/sysproduction/data/prices.py b/sysproduction/data/prices.py index 9f53a149d0..8eba856c6e 100644 --- a/sysproduction/data/prices.py +++ b/sysproduction/data/prices.py @@ -35,7 +35,14 @@ ## default for spike checking from sysproduction.data.instruments import diagInstruments, get_block_size -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_CONTRACT_PRICE_DATA, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FUTURES_CONTRACT_DATA, HISTORIC_SPREAD_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + FUTURES_CONTRACT_PRICE_DATA, + FUTURES_ADJUSTED_PRICE_DATA, + FUTURES_MULTIPLE_PRICE_DATA, + FUTURES_CONTRACT_DATA, + HISTORIC_SPREAD_DATA, +) VERY_BIG_NUMBER = 999999.0 @@ -48,7 +55,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), get_class_for_data_type(FUTURES_CONTRACT_DATA), get_class_for_data_type(HISTORIC_SPREAD_DATA), - get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA) + get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), ] ) return data @@ -243,7 +250,7 @@ def _add_required_classes_to_data(self, data) -> dataBlob: get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), get_class_for_data_type(FUTURES_CONTRACT_DATA), get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), - get_class_for_data_type(HISTORIC_SPREAD_DATA) + get_class_for_data_type(HISTORIC_SPREAD_DATA), ] ) @@ -254,7 +261,6 @@ def overwrite_merged_prices_for_contract( contract_object: futuresContract, new_prices: futuresContractPrices, ): - self.db_futures_contract_price_data.write_merged_prices_for_contract_object( contract_object, futures_price_data=new_prices, ignore_duplication=True ) @@ -265,7 +271,6 @@ def overwrite_prices_at_frequency_for_contract( new_prices: futuresContractPrices, frequency: Frequency, ): - self.db_futures_contract_price_data.write_prices_at_frequency_for_contract_object( futures_contract_object=contract_object, futures_price_data=new_prices, @@ -281,7 +286,6 @@ def update_prices_at_frequency_for_contract( check_for_spike: bool = True, max_price_spike: float = VERY_BIG_NUMBER, ) -> int: - error_or_rows_added = ( self.db_futures_contract_price_data.update_prices_at_frequency_for_contract( contract_object=contract_object, @@ -498,7 +502,6 @@ def modify_price_when_contract_has_changed( original_contract_date: str, original_price: float, ) -> float: - if original_contract_date == new_contract_date: return original_price diff --git a/sysproduction/data/production_data_objects.py b/sysproduction/data/production_data_objects.py index 99399b77df..d3f4d68419 100644 --- a/sysproduction/data/production_data_objects.py +++ b/sysproduction/data/production_data_objects.py @@ -1,12 +1,18 @@ from sysdata.parquet.parquet_adjusted_prices import parquetFuturesAdjustedPricesData from sysdata.parquet.parquet_capital import parquetCapitalData -from sysdata.parquet.parquet_futures_per_contract_prices import parquetFuturesContractPriceData +from sysdata.parquet.parquet_futures_per_contract_prices import ( + parquetFuturesContractPriceData, +) from sysdata.parquet.parquet_multiple_prices import parquetFuturesMultiplePricesData from sysdata.parquet.parquet_spotfx_prices import parquetFxPricesData from sysdata.parquet.parquet_spreads import parquetSpreadsForInstrumentData from sysdata.parquet.parquet_optimal_positions import parquetOptimalPositionData -from sysdata.parquet.parquet_historic_strategy_positions import parquetStrategyPositionData -from sysdata.parquet.parquet_historic_contract_positions import parquetContractPositionData +from sysdata.parquet.parquet_historic_strategy_positions import ( + parquetStrategyPositionData, +) +from sysdata.parquet.parquet_historic_contract_positions import ( + parquetContractPositionData, +) """ from sysdata.arctic.arctic_adjusted_prices import arcticFuturesAdjustedPricesData @@ -47,11 +53,11 @@ STRATEGY_POSITION_DATA = "strategy_position_data" OPTIMAL_POSITION_DATA = "optimal_position_data" HISTORIC_SPREAD_DATA = "historic_spread_data" -STORED_SPREAD_DATA ="stored_spread_data" +STORED_SPREAD_DATA = "stored_spread_data" FX_DATA = "fx_data" ROLL_PARAMETERS_DATA = "roll_parameters_data" FUTURES_CONTRACT_DATA = "futures_contract_data" -PROCESS_CONTROL_DATA= "process_control_data" +PROCESS_CONTROL_DATA = "process_control_data" FUTURES_INSTRUMENT_DATA = "futures_instrument_data" INSTRUMENT_ORDER_STACK_DATA = "instrument_order_stack_data" CONTRACT_ORDER_STACK_DATA = "contract_order_stack_data" @@ -67,32 +73,24 @@ FUTURES_INSTRUMENT_DATA: csvFuturesInstrumentData, FUTURES_CONTRACT_DATA: mongoFuturesContractData, STORED_SPREAD_DATA: mongoSpreadCostData, - FUTURES_CONTRACT_PRICE_DATA: parquetFuturesContractPriceData, FUTURES_MULTIPLE_PRICE_DATA: parquetFuturesMultiplePricesData, FUTURES_ADJUSTED_PRICE_DATA: parquetFuturesAdjustedPricesData, - CAPITAL_DATA: parquetCapitalData, - CONTRACT_POSITION_DATA: parquetContractPositionData, STRATEGY_POSITION_DATA: parquetStrategyPositionData, OPTIMAL_POSITION_DATA: parquetOptimalPositionData, HISTORIC_SPREAD_DATA: parquetSpreadsForInstrumentData, - STRATEGY_HISTORIC_ORDERS_DATA: mongoStrategyHistoricOrdersData, CONTRACT_HISTORIC_ORDERS_DATA: mongoContractHistoricOrdersData, BROKER_HISTORIC_ORDERS_DATA: mongoBrokerHistoricOrdersData, - INSTRUMENT_ORDER_STACK_DATA: mongoInstrumentOrderStackData, CONTRACT_ORDER_STACK_DATA: mongoContractOrderStackData, BROKER_ORDER_STACK_DATA: mongoBrokerOrderStackData, - ROLL_STATE_DATA: mongoRollStateData, - - PROCESS_CONTROL_DATA: mongoControlProcessData + PROCESS_CONTROL_DATA: mongoControlProcessData, } -def get_class_for_data_type(data_type:str): +def get_class_for_data_type(data_type: str): return use_production_classes[data_type] - diff --git a/sysproduction/data/reports.py b/sysproduction/data/reports.py index 5d18dcf8e8..30273226ab 100644 --- a/sysproduction/data/reports.py +++ b/sysproduction/data/reports.py @@ -35,7 +35,6 @@ def get_default_reporting_config_dict(self) -> dict: def populate_reporting_config_from_yaml_input( config_dict_from_yaml: dict, default_config: dict ) -> dict: - if len(config_dict_from_yaml) == 0: return default_config @@ -60,7 +59,6 @@ def populate_reporting_config_from_yaml_input( def _resolve_config_for_named_report( report_name: str, config_dict_from_yaml: dict, default_config: dict ) -> reportConfig: - default_config_for_report = default_config[report_name] new_config_for_report = config_dict_from_yaml[report_name] @@ -78,7 +76,6 @@ def _resolve_config_for_named_report( def _resolve_config_from_config_pair( default_config_for_report: reportConfig, new_config_for_report: dict ) -> reportConfig: - new_config = copy(default_config_for_report) attr_names = new_config_for_report.keys() for attribute in attr_names: diff --git a/sysproduction/data/risk.py b/sysproduction/data/risk.py index 471f4c98fa..3f29e86dba 100644 --- a/sysproduction/data/risk.py +++ b/sysproduction/data/risk.py @@ -28,7 +28,6 @@ def get_covariance_matrix_for_instrument_returns( list_of_instruments: list, passed_correlation_estimation_parameters: dict = arg_not_supplied, ) -> covarianceEstimate: - corr_matrix = get_correlation_matrix_for_instrument_returns( data, list_of_instruments, @@ -50,7 +49,6 @@ def get_correlation_matrix_for_instrument_returns( list_of_instruments: list, passed_correlation_estimation_parameters: dict = arg_not_supplied, ) -> correlationEstimate: - list_of_correlations = _replicate_creation_of_correlation_list_in_sim( data, list_of_instruments, @@ -67,7 +65,6 @@ def _replicate_creation_of_correlation_list_in_sim( list_of_instruments: list, passed_correlation_estimation_parameters: dict = arg_not_supplied, ): - ## double coding but too complex to do differently returns_as_pd = get_perc_returns_across_instruments(data, list_of_instruments) diff --git a/sysproduction/data/sim_data.py b/sysproduction/data/sim_data.py index 03d3db4cb9..8a6a1d1615 100644 --- a/sysproduction/data/sim_data.py +++ b/sysproduction/data/sim_data.py @@ -3,7 +3,16 @@ from sysdata.sim.db_futures_sim_data import dbFuturesSimData from sysdata.data_blob import dataBlob -from sysproduction.data.production_data_objects import get_class_for_data_type, FUTURES_ADJUSTED_PRICE_DATA, FUTURES_MULTIPLE_PRICE_DATA, FX_DATA, STORED_SPREAD_DATA, FUTURES_INSTRUMENT_DATA, ROLL_PARAMETERS_DATA +from sysproduction.data.production_data_objects import ( + get_class_for_data_type, + FUTURES_ADJUSTED_PRICE_DATA, + FUTURES_MULTIPLE_PRICE_DATA, + FX_DATA, + STORED_SPREAD_DATA, + FUTURES_INSTRUMENT_DATA, + ROLL_PARAMETERS_DATA, +) + def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimData: # Check data has the right elements to do this @@ -17,7 +26,7 @@ def get_sim_data_object_for_production(data=arg_not_supplied) -> dbFuturesSimDat get_class_for_data_type(FX_DATA), get_class_for_data_type(STORED_SPREAD_DATA), get_class_for_data_type(FUTURES_INSTRUMENT_DATA), - get_class_for_data_type(ROLL_PARAMETERS_DATA) + get_class_for_data_type(ROLL_PARAMETERS_DATA), ] ) diff --git a/sysproduction/data/volumes.py b/sysproduction/data/volumes.py index 532daa7872..3a28dc41c2 100644 --- a/sysproduction/data/volumes.py +++ b/sysproduction/data/volumes.py @@ -6,7 +6,10 @@ from sysdata.data_blob import dataBlob from sysproduction.data.generic_production_data import productionDataLayerGeneric -from sysproduction.data.production_data_objects import FUTURES_CONTRACT_PRICE_DATA, get_class_for_data_type +from sysproduction.data.production_data_objects import ( + FUTURES_CONTRACT_PRICE_DATA, + get_class_for_data_type, +) # Get volume data for the contract we're currently trading, plus what we might roll into, plus the previous one # This is handy for working out whether to roll @@ -60,7 +63,6 @@ def get_smoothed_volumes_of_contract_list( def get_smoothed_volume_for_contract( self, instrument_code: str, contract_date_str: str ) -> float: - contract = futuresContract(instrument_code, contract_date_str) try: volumes = self.get_daily_volumes_for_contract(contract) diff --git a/sysproduction/interactive_controls.py b/sysproduction/interactive_controls.py index ca097d4ba8..1092c445a2 100644 --- a/sysproduction/interactive_controls.py +++ b/sysproduction/interactive_controls.py @@ -259,7 +259,6 @@ def set_trade_limit_for_instrument( period_days: int, auto_parameters: parametersForAutoPopulation, ): - trade_limits = dataTradeLimits(data) new_limit = calc_trade_limit_for_instrument( data, @@ -356,7 +355,6 @@ def get_auto_population_parameters() -> parametersForAutoPopulation: def get_maximum_position_at_max_forecast( data: dataBlob, instrument_code: str, auto_parameters: parametersForAutoPopulation ) -> float: - risk_data = get_risk_data_for_instrument(data, instrument_code) position_for_risk = get_standardised_position_for_risk( risk_data, auto_parameters=auto_parameters @@ -398,7 +396,6 @@ def get_maximum_position_at_max_forecast( def get_standardised_position_for_risk( risk_data: dict, auto_parameters: parametersForAutoPopulation ) -> float: - capital = risk_data["capital"] annual_risk_per_contract = risk_data["annual_risk_per_contract"] if np.isnan(annual_risk_per_contract): @@ -472,7 +469,6 @@ def get_maximum_position_given_leverage_limit( def get_maximum_position_given_risk_concentration_limit( risk_data: dict, auto_parameters: parametersForAutoPopulation ) -> float: - ccy_risk_per_contract = abs(risk_data["annual_risk_per_contract"]) if np.isnan(ccy_risk_per_contract): print("Can't get risk per contract, Max position exposure limit will be zero") @@ -513,7 +509,6 @@ def get_maximum_position_given_risk_concentration_limit( def get_max_position_give_volume_limit( data: dataBlob, instrument_code: str, auto_parameters: parametersForAutoPopulation ) -> float: - max_proportion_of_volume = auto_parameters.max_proportion_of_volume volume_for_instrument = get_best_average_daily_volume_for_instrument( data, instrument_code @@ -531,7 +526,6 @@ def get_max_position_give_volume_limit( def view_position_limit(data): - data_position_limits = dataPositionLimits(data) instrument_limits = data_position_limits.get_all_instrument_limits_and_positions() strategy_instrument_limits = ( @@ -612,7 +606,6 @@ def auto_populate_position_limits(data: dataBlob): def set_position_limit_for_instrument( data, instrument_code: str, auto_parameters: parametersForAutoPopulation ): - data_position_limits = dataPositionLimits(data) existing_position_limit = ( data_position_limits._get_position_limit_object_for_instrument(instrument_code) @@ -643,7 +636,6 @@ def set_position_limit_for_instrument( def get_max_rounded_position_for_instrument( data, instrument_code: str, auto_parameters: parametersForAutoPopulation ): - max_position = get_maximum_position_at_max_forecast( data, instrument_code=instrument_code, auto_parameters=auto_parameters ) @@ -854,7 +846,6 @@ def get_slippage_data(data) -> pd.DataFrame: def get_list_of_changes_to_make_to_slippage( slippage_comparison_pd: pd.DataFrame, ) -> dict: - filter = get_filter_size_for_slippage() changes_to_make = dict() instrument_list = slippage_comparison_pd.index @@ -975,7 +966,6 @@ def check_price_multipliers_consistent(data: dataBlob): def check_price_multipliers_consistent_for_instrument( data: dataBlob, instrument_code: str ): - print("Checking %s" % instrument_code) data_broker = dataBroker(data) diag_instruments = diagInstruments(data) diff --git a/sysproduction/interactive_diagnostics.py b/sysproduction/interactive_diagnostics.py index 6a652948fb..0aec5b4502 100644 --- a/sysproduction/interactive_diagnostics.py +++ b/sysproduction/interactive_diagnostics.py @@ -200,7 +200,6 @@ def reconcile_report(data): def strategy_report(data): - strategy_name = get_valid_strategy_name_from_user( data=data, allow_all=True, all_code=ALL_STRATEGIES ) @@ -259,7 +258,6 @@ def remove_markets_report(data): def market_monitor_report(data): - run_full_report = true_if_answer_is_yes( "Run normal full report? (alternative is customise dates)" ) @@ -698,7 +696,6 @@ def check_trading_hours_one_day( def get_trading_hours_for_instrument( data: dataBlob, instrument_code: str ) -> listOfTradingHours: - diag_contracts = dataContracts(data) contract_id = diag_contracts.get_priced_contract_id(instrument_code) diff --git a/sysproduction/interactive_manual_check_historical_prices.py b/sysproduction/interactive_manual_check_historical_prices.py index 115a116dd6..5d58d69aad 100644 --- a/sysproduction/interactive_manual_check_historical_prices.py +++ b/sysproduction/interactive_manual_check_historical_prices.py @@ -26,7 +26,6 @@ def interactive_manual_check_historical_prices(): :return: Nothing """ with dataBlob(log_name="Update-Historical-prices-manually") as data: - cleaning_config = interactively_get_config_overrides_for_cleaning(data=data) do_another = True diff --git a/sysproduction/interactive_order_stack.py b/sysproduction/interactive_order_stack.py index 0172742d1e..aeebab448a 100644 --- a/sysproduction/interactive_order_stack.py +++ b/sysproduction/interactive_order_stack.py @@ -189,7 +189,6 @@ def spawn_contracts_from_instrument_orders(data): def create_balance_trade(data): - print( "Most likely use case here is that IB has closed one of your positions as close to the expiry" ) @@ -331,7 +330,6 @@ def default_price_for_contract(data: dataBlob, futures_contract: futuresContract def create_instrument_balance_trade(data): - print("Use to fix breaks between instrument strategy and contract level positions") strategy_name = get_valid_strategy_name_from_user(data=data, source="positions") instrument_code = get_valid_instrument_code_from_user(data) @@ -380,7 +378,6 @@ def default_price_for_instrument(data: dataBlob, instrument_code: str) -> float: def create_manual_trade(data): - print( "Create a trade which will then be executed by the system (so don't use this if you are doing your trades manually)" ) diff --git a/sysproduction/interactive_update_capital_manual.py b/sysproduction/interactive_update_capital_manual.py index dd32c033c5..1a9837f6d0 100644 --- a/sysproduction/interactive_update_capital_manual.py +++ b/sysproduction/interactive_update_capital_manual.py @@ -22,7 +22,6 @@ def interactive_update_capital_manual(): :return: Nothing """ with dataBlob(log_name="Interactive-Update-Capital-Manual") as data: - still_running = True while still_running: # display capital and get input @@ -140,7 +139,6 @@ def get_initial_capital_values_from_user(data: dataBlob): def update_capital_from_ib(data: dataBlob): - data_capital = dataCapital(data) broker_account_value = get_broker_account_value(data) try: diff --git a/sysproduction/interactive_update_roll_status.py b/sysproduction/interactive_update_roll_status.py index cbe881160b..e1bdd744f5 100644 --- a/sysproduction/interactive_update_roll_status.py +++ b/sysproduction/interactive_update_roll_status.py @@ -54,7 +54,6 @@ def interactive_update_roll_status(): - with dataBlob(log_name="Interactive_Update-Roll-Status") as data: api = reportingApi(data) function_to_call = get_rolling_master_function() @@ -104,7 +103,6 @@ def original_roll_status_as_string(self): return self.original_roll_status.name def display_roll_query_banner(self): - print(landing_strip(80)) print("Current State: %s" % self.original_roll_status) print( @@ -206,7 +204,6 @@ def update_roll_status_full_auto(api: reportingApi, data: dataBlob): if roll_state_required is no_change_required: warn_not_rolling(instrument_code, auto_parameters) else: - modify_roll_state( data=api.data, instrument_code=instrument_code, @@ -228,7 +225,6 @@ def get_days_ahead_to_consider_when_auto_cycling() -> int: def get_list_of_instruments_to_auto_cycle(data: dataBlob, days_ahead: int = 10) -> list: - diag_prices = diagPrices() list_of_potential_instruments = ( diag_prices.get_list_of_instruments_in_multiple_prices() @@ -252,13 +248,11 @@ def get_list_of_instruments_to_auto_cycle(data: dataBlob, days_ahead: int = 10) def include_instrument_in_auto_cycle( data: dataBlob, instrument_code: str, days_ahead: int = 10 ) -> bool: - days_until_expiry = days_until_earliest_expiry(data, instrument_code) return days_until_expiry <= days_ahead def days_until_earliest_expiry(data: dataBlob, instrument_code: str) -> int: - data_contracts = dataContracts(data) carry_days = data_contracts.days_until_carry_expiry(instrument_code) roll_days = data_contracts.days_until_roll(instrument_code) @@ -376,7 +370,6 @@ def default_auto_roll_parameters(data: dataBlob) -> dict: def describe_roll_rules_from_parameters(auto_parameters: autoRollParameters): - print( "AUTO ROLL RULES:\n\n" + "%s\n\n" % describe_action_for_auto_roll_expired(auto_parameters) @@ -425,7 +418,6 @@ def auto_selected_roll_state_instrument( roll_data: RollDataWithStateReporting, auto_parameters: autoRollParameters, ) -> RollState: - run_roll_report(api, roll_data.instrument_code) roll_state_required = suggest_roll_state_for_instrument( roll_data=roll_data, auto_parameters=auto_parameters @@ -454,7 +446,6 @@ def suggest_roll_state_for_instrument( roll_data: RollDataWithStateReporting, auto_parameters: autoRollParameters, ) -> RollState: - forward_liquid = check_if_forward_liquid( roll_data=roll_data, auto_parameters=auto_parameters ) @@ -533,7 +524,6 @@ def check_if_getting_close_to_desired_roll_date( def check_if_expired_and_auto_rolling_expired( roll_data: RollDataWithStateReporting, auto_parameters: autoRollParameters ) -> bool: - expired = roll_data.days_until_expiry <= 0 auto_rolling_expired = auto_parameters.auto_roll_expired @@ -541,7 +531,6 @@ def check_if_expired_and_auto_rolling_expired( def warn_not_rolling(instrument_code: str, auto_parameters: autoRollParameters): - print_with_landing_strips_around( "\nNo change to rolling status for %s given parameters %s\n" % (instrument_code, str(auto_parameters)) @@ -701,7 +690,6 @@ def modify_roll_state( roll_state_required: RollState, confirm_adjusted_price_change: bool = True, ): - roll_state_is_unchanged = (roll_state_required is no_change_required) or ( roll_state_required is original_roll_state ) @@ -833,7 +821,6 @@ def get_roll_adjusted_multiple_prices_object( data: dataBlob, instrument_code: str, ) -> rollingAdjustedAndMultiplePrices: - ## returns failure if goes wrong try: rolling_adj_and_mult_object = rollingAdjustedAndMultiplePrices( @@ -858,7 +845,6 @@ def get_roll_adjusted_multiple_prices_object( def _get_roll_adjusted_multiple_prices_object_ffill_option( data: dataBlob, instrument_code: str ) -> rollingAdjustedAndMultiplePrices: - ## returns failure if goes wrong try_forward_fill = true_if_answer_is_yes( "Do you want to try forward filling prices first (less accurate, but guarantees roll)? [y/n]" @@ -920,7 +906,6 @@ def check_trading_limits_for_roll_state( def calculate_abs_trades_required_for_roll( data: dataBlob, roll_state_required: RollState, instrument_code: str ) -> float: - data_contacts = dataContracts(data) diag_positions = diagPositions(data) current_priced_contract_id = data_contacts.get_priced_contract_id( diff --git a/sysproduction/linux/scripts/run.py b/sysproduction/linux/scripts/run.py index 5aa720aeb1..63d8f78239 100755 --- a/sysproduction/linux/scripts/run.py +++ b/sysproduction/linux/scripts/run.py @@ -48,7 +48,6 @@ def resolve_func(func_reference_name): if __name__ == "__main__": - if len(sys.argv) == 1: print( "Enter the name of a function with full pathname eg systems.basesystem.System" diff --git a/sysproduction/reporting/account_curve_report.py b/sysproduction/reporting/account_curve_report.py index c2bc607155..8c8a37df4d 100644 --- a/sysproduction/reporting/account_curve_report.py +++ b/sysproduction/reporting/account_curve_report.py @@ -18,7 +18,6 @@ def account_curve_report( start_date=arg_not_supplied, end_date=arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() @@ -33,14 +32,12 @@ def account_curve_report( def _account_curve_report_with_dates(reporting_api: reportingApi) -> list: - figure_object = reporting_api.figure_of_account_curve_using_dates() return [figure_object] def _account_curve_report_full(reporting_api: reportingApi) -> list: - formatted_output = [] for period in list_of_periods: diff --git a/sysproduction/reporting/adhoc/dynamic_optimisation.py b/sysproduction/reporting/adhoc/dynamic_optimisation.py index b4f2ff0f1f..bf76dbfa1a 100644 --- a/sysproduction/reporting/adhoc/dynamic_optimisation.py +++ b/sysproduction/reporting/adhoc/dynamic_optimisation.py @@ -40,7 +40,6 @@ def get_notional_risk_target(): def dynamic_optimisation_graphical(data: dataBlob, strategy_name: str): - report_config = reportConfig( title="Dynamic Optimisation Graphical", function="not_used", output="file" ) @@ -55,7 +54,6 @@ def dynamic_optimisation_graphical(data: dataBlob, strategy_name: str): def get_figures_for_DO(data: dataBlob, strategy_name: str): - df_results = get_data_for_scatter_plot(data, strategy_name) all_results = [] index_risk = get_notional_risk_target() @@ -96,7 +94,6 @@ def get_figures_for_DO(data: dataBlob, strategy_name: str): def get_data_for_scatter_plot(data: dataBlob, strategy_name: str) -> pd.DataFrame: - optimal_position_objects_as_list = get_optimal_position_objects_as_list( data=data, strategy_name=strategy_name ) @@ -128,7 +125,6 @@ def get_data_for_scatter_plot(data: dataBlob, strategy_name: str) -> pd.DataFram def plot_scatter_for_asset_class(results: pd.DataFrame, asset_class: str): - subset_results = results[results.asset_classes == asset_class] plot_scatter_names_only(subset_results) @@ -178,7 +174,6 @@ def get_optimised_weights(optimal_positions_as_list) -> portfolioWeights: def get_item_from_optimised_weights_list( optimal_positions_as_list: list, item_name: str ) -> dict: - optimal_weights_as_dict = dict( [ ( @@ -209,7 +204,6 @@ def instrument_codes_from_optimal_positions_as_list(optimal_positions_as_list) - def get_standard_deviations_for_instrument_list( data: dataBlob, instrument_list: list ) -> stdevEstimates: - stdev_dict = dict( [ ( @@ -226,7 +220,6 @@ def get_standard_deviations_for_instrument_list( def get_pd_df_of_betas_to_plot( data: dataBlob, strategy_name: str, index_risk: float ) -> tuple: - optimal_position_objects_as_list = get_optimal_position_objects_as_list( data=data, strategy_name=strategy_name ) @@ -271,7 +264,6 @@ def get_pd_df_of_beta_loadings( dict_of_betas: dict, dict_of_asset_classes: dict, ): - beta_loadings_unrounded = ( calculate_dict_of_beta_loadings_by_asset_class_given_weights( unrounded_weights, dict_of_betas, dict_of_asset_classes @@ -295,7 +287,6 @@ def get_pd_df_of_beta_loadings( def dynamic_optimisation_text(data: dataBlob, strategy_name: str): - report_config = reportConfig( title="Dynamic Optimisation Text", function="not_used", output="file" ) diff --git a/sysproduction/reporting/adhoc/instrument_list.py b/sysproduction/reporting/adhoc/instrument_list.py index 17952e3dc9..e9e8bad713 100644 --- a/sysproduction/reporting/adhoc/instrument_list.py +++ b/sysproduction/reporting/adhoc/instrument_list.py @@ -19,7 +19,6 @@ def instrument_list_report(): - report_config = reportConfig( title="Instrument list", function="not_used", output="file" ) @@ -66,7 +65,6 @@ def instrument_results_as_pd_df_row( data_broker: dataBroker, contract_data: dataContracts, ): - instrument_broker_data = data_broker.get_brokers_instrument_with_metadata( instrument_code ) diff --git a/sysproduction/reporting/adhoc/static_system.py b/sysproduction/reporting/adhoc/static_system.py index 6d389920a4..b9df178a3b 100644 --- a/sysproduction/reporting/adhoc/static_system.py +++ b/sysproduction/reporting/adhoc/static_system.py @@ -21,7 +21,6 @@ def static_system_adhoc_report( system_function, list_of_capital_and_estimate_instrument_count_tuples: list ): - data = dataBlob() report_config = reportConfig( title="Static selection of instruments", function="not_used", output="file" @@ -71,7 +70,6 @@ def static_system_results_for_capital( est_number_of_instruments: int, capital: float, ): - notional_starting_IDM = est_number_of_instruments**0.25 max_instrument_weight = 1.0 / est_number_of_instruments diff --git a/sysproduction/reporting/adhoc/trading_rule_pandl.py b/sysproduction/reporting/adhoc/trading_rule_pandl.py index 875b590fbd..e25db94a5d 100644 --- a/sysproduction/reporting/adhoc/trading_rule_pandl.py +++ b/sysproduction/reporting/adhoc/trading_rule_pandl.py @@ -30,7 +30,6 @@ def trading_rule_pandl_adhoc_report( system_function, end_date: datetime.datetime = arg_not_supplied, ): - data = dataBlob() report_config = reportConfig( title="Trading rule p&l", function="not_used", output="file" @@ -83,7 +82,6 @@ def get_figure_for_rule_group( period_label: str, end_date: datetime.datetime, ): - rules = dict_of_rule_groups[rule_group] pandl_by_rule = dict( [ diff --git a/sysproduction/reporting/api.py b/sysproduction/reporting/api.py index b63f7f2d89..0ced3272dd 100644 --- a/sysproduction/reporting/api.py +++ b/sysproduction/reporting/api.py @@ -102,7 +102,6 @@ def __init__( start_period: str = arg_not_supplied, end_period: str = arg_not_supplied, ): - self._data = data self._calendar_days_back = calendar_days_back self._passed_start_date = start_date @@ -175,7 +174,6 @@ def _get_daily_perc_pandl(self) -> pd.Series: def table_of_market_moves_using_dates( self, sortby: str, truncate: bool = True ) -> table: - # sort by one of ['name', 'change', 'vol_adjusted'] raw_df = self.market_moves_for_dates() sorted_df = raw_df.sort_values(sortby) @@ -193,7 +191,6 @@ def table_of_market_moves_using_dates( def table_of_market_moves_given_period( self, period: str, sortby: str, truncate: bool = True ) -> table: - # sort by one of ['name', 'change', 'vol_adjusted'] # period eg ['1B', '7D', '1M', '3M', '6M', 'YTD', '12M'] raw_df = self.market_moves_for_period(period) @@ -334,7 +331,6 @@ def body_text_total_capital_pandl(self): return body_text("Total p&l is %.3f%%" % total_capital_pandl) def table_pandl_for_instruments_across_strategies(self): - pandl_for_instruments_across_strategies_df = ( self.pandl_for_instruments_across_strategies() ) @@ -378,7 +374,6 @@ def pandl_for_instruments_across_strategies(self) -> pd.DataFrame: return pandl_for_instruments_across_strategies def _get_pandl_for_instruments_across_strategies(self) -> pd.DataFrame: - pandl_for_instruments_across_strategies_df = ( self.pandl_calculator.get_ranked_list_of_pandl_by_instrument_all_strategies_in_date_range() ) @@ -402,12 +397,10 @@ def _get_total_capital_pandl(self) -> float: return total_capital_pandl def body_text_residual_pandl(self): - residual = self.total_capital_pandl() - self.total_pandl_for_futures() return body_text("Residual p&l is %.3f%%" % residual) def table_strategy_pandl_and_residual(self): - strategies_pandl_df = self.pandl_calculator.get_strategy_pandl_and_residual() strategies_pandl_df = strategies_pandl_df.round(2) @@ -865,7 +858,6 @@ def _get_liquidity_data(self) -> pd.DataFrame: def table_of_sr_costs( self, include_commission: bool = True, include_spreads: bool = True ) -> table: - if not include_commission and not include_spreads: raise Exception("Must include commission or spreads!") elif not include_spreads: @@ -899,7 +891,6 @@ def SR_costs_spreads_only(self) -> pd.DataFrame: def _SR_costs( self, include_commission: bool = True, include_spread: bool = True ) -> pd.DataFrame: - SR_costs = get_table_of_SR_costs( self.data, include_commission=include_commission, @@ -1130,7 +1121,6 @@ def filter_data_for_delays_and_return_table( table_header="Only delayed data", max_delay_in_days=3, ): - filtered_data = filter_data_for_delays( data_with_datetime, datetime_colum=datetime_colum, @@ -1145,7 +1135,6 @@ def filter_data_for_delays_and_return_table( def filter_data_for_delays( data_with_datetime, datetime_colum="last_start", max_delay_in_days=3 ) -> pd.DataFrame: - max_delay_in_seconds = max_delay_in_days * SECONDS_PER_DAY time_delays = datetime.datetime.now() - data_with_datetime[datetime_colum] delayed = [ @@ -1159,7 +1148,6 @@ def filter_data_for_delays( def filter_data_for_max_value_and_return_table( data_with_field, field_column="field", max_value=0, table_header="" ): - filtered_data = filter_data_for_max_value( data_with_field, field_column=field_column, max_value=max_value ) @@ -1172,7 +1160,6 @@ def filter_data_for_max_value_and_return_table( def filter_data_for_max_value( data_with_field, field_column="field", max_value=0 ) -> pd.DataFrame: - field_values = data_with_field[field_column] filtered = [value <= max_value for value in field_values] diff --git a/sysproduction/reporting/costs_report.py b/sysproduction/reporting/costs_report.py index 57cc95bcad..97228a51b2 100644 --- a/sysproduction/reporting/costs_report.py +++ b/sysproduction/reporting/costs_report.py @@ -10,7 +10,6 @@ def costs_report(data: dataBlob = arg_not_supplied, calendar_days_back=250): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/data/costs.py b/sysproduction/reporting/data/costs.py index 5972dd5d6e..b50a34ee59 100644 --- a/sysproduction/reporting/data/costs.py +++ b/sysproduction/reporting/data/costs.py @@ -35,7 +35,6 @@ def get_SR_cost_calculation_for_instrument( include_commission: bool = True, include_spread: bool = True, ): - diag_instruments = diagInstruments(data) costs_object = diag_instruments.get_cost_object(instrument_code) if not include_spread: @@ -77,7 +76,6 @@ def get_SR_cost_calculation_for_instrument( def adjust_df_costs_show_ticks( data: dataBlob, combined_df_costs: pd.DataFrame ) -> pd.DataFrame: - tick_adjusted_df_costs = copy(combined_df_costs) list_of_instrument_codes = list(tick_adjusted_df_costs.index) series_of_tick_values = get_series_of_tick_values(data, list_of_instrument_codes) @@ -120,7 +118,6 @@ def get_series_of_tick_values( def get_tick_value_for_instrument_code( instrument_code: str, broker_data: dataBroker, contract_data: dataContracts ) -> float: - try: contract_id = contract_data.get_priced_contract_id(instrument_code) except missingData: @@ -143,7 +140,6 @@ def get_tick_value_for_instrument_code( def get_combined_df_of_costs( data: dataBlob, start_date: datetime.datetime, end_date: datetime.datetime ) -> pd.DataFrame: - bid_ask_costs, actual_trade_costs, order_count = get_costs_from_slippage( data, start_date, end_date ) @@ -193,7 +189,6 @@ def best_estimate_from_cost_data( trades_to_count_as_config=10, samples_to_count_as_config=150, ) -> pd.Series: - worst_execution = pd.concat([bid_ask_costs, actual_trade_costs], axis=1) worst_execution = worst_execution.max(axis=1) @@ -308,7 +303,6 @@ def order_count_by_instrument(list_of_orders): def get_average_half_spread_by_instrument_from_raw_slippage( raw_slippage, use_column="bid_ask" ): - half_spreads_as_slippage = raw_slippage[use_column] half_spreads = -half_spreads_as_slippage half_spreads.index = raw_slippage.instrument_code diff --git a/sysproduction/reporting/data/duplicate_remove_markets.py b/sysproduction/reporting/data/duplicate_remove_markets.py index b720c99979..2ad7c72465 100644 --- a/sysproduction/reporting/data/duplicate_remove_markets.py +++ b/sysproduction/reporting/data/duplicate_remove_markets.py @@ -131,7 +131,6 @@ def removed_markets_addback(self) -> list: def bad_markets( self, apply_higher_threshold=False, apply_lower_threshold=False ) -> list: - threshold_factor = calculate_threshold_factor( apply_lower_threshold=apply_lower_threshold, apply_higher_threshold=apply_higher_threshold, @@ -531,7 +530,6 @@ def get_bad_market_filter_parameters(): def calculate_threshold_factor( apply_lower_threshold: bool = False, apply_higher_threshold: bool = False ) -> float: - ## The threshold factor is a number we apply ## To be stopped from trading an existing market must be well below the threshold for not being a bad market ## To be added to trading an existing bad market must be well above the threshold for not being a bad market diff --git a/sysproduction/reporting/data/pandl.py b/sysproduction/reporting/data/pandl.py index 5b167f14d0..17e77d6f93 100644 --- a/sysproduction/reporting/data/pandl.py +++ b/sysproduction/reporting/data/pandl.py @@ -52,7 +52,6 @@ def get_daily_perc_pandl(data): def get_total_capital_pandl(data, start_date, end_date=arg_not_supplied): - if end_date is arg_not_supplied: end_date = datetime.datetime.now() perc_pandl_series = get_daily_perc_pandl(data) @@ -292,7 +291,6 @@ def perc_pandl_series_for_strategy_instrument_vs_total_capital( def _get_perc_pandl_series_for_strategy_instrument_vs_total_capital( self, instrument_strategy: instrumentStrategy ): - pandl_series = get_perc_pandl_series_for_strategy_instrument_vs_total_capital( self.data, instrument_strategy ) @@ -364,7 +362,6 @@ def get_list_of_instruments_held_for_a_strategy(data, strategy_name): def get_perc_pandl_series_for_contract(data, instrument_code, contract_id): - capital = get_total_capital_series(data) fx = get_fx_series_for_instrument(data, instrument_code) diag_instruments = diagInstruments(data) diff --git a/sysproduction/reporting/data/positions.py b/sysproduction/reporting/data/positions.py index 3dea835459..68264c1bc6 100644 --- a/sysproduction/reporting/data/positions.py +++ b/sysproduction/reporting/data/positions.py @@ -28,7 +28,6 @@ def get_broker_positions(data): def get_position_breaks(data): - data_optimal = dataOptimalPositions(data) breaks_str0 = "Breaks Optimal vs actual %s" % str( data_optimal.get_list_of_optimal_position_breaks() diff --git a/sysproduction/reporting/data/pricechanges.py b/sysproduction/reporting/data/pricechanges.py index 492976448c..e37dd258e6 100644 --- a/sysproduction/reporting/data/pricechanges.py +++ b/sysproduction/reporting/data/pricechanges.py @@ -62,7 +62,6 @@ def get_market_move_for_instrument_and_dates(self, instrument_code: str) -> dict ) def get_market_moves_for_period(self, period: str) -> pd.DataFrame: - self._end_date = datetime.datetime.now() print("Getting data for %s" % period) @@ -115,7 +114,6 @@ def get_percentage_change( start_date: datetime.datetime, end_date: datetime.date, ) -> float: - price_series = self.get_prices_for_instrument(instrument_code) change = get_percentage_change_from_series_for_period( price_series, start_date=start_date, end_date=end_date @@ -157,7 +155,6 @@ def calculate_vol( start_date: datetime.datetime, end_date: datetime.date, ) -> float: - vol_scalar = get_approx_vol_scalar_versus_daily_vol_for_period( start_date, end_date ) @@ -170,7 +167,6 @@ def calculate_vol( def get_stdev_at_start_date_for_instrument( self, start_date: datetime.date, instrument_code: str ): - stdev = get_stdev_at_start_date_for_instrument( start_date=start_date, price_series=self.get_prices_for_instrument(instrument_code), diff --git a/sysproduction/reporting/data/risk.py b/sysproduction/reporting/data/risk.py index b63d09f7d8..ccf8cef147 100644 --- a/sysproduction/reporting/data/risk.py +++ b/sysproduction/reporting/data/risk.py @@ -78,7 +78,6 @@ def minimum_capital_table( idm=IDM_ASSUMED, instrument_weight=INSTRUMENT_WEIGHT_ASSUMED, ) -> pd.DataFrame: - instrument_risk_table = get_instrument_risk_table( data, only_held_instruments=only_held_instruments ) @@ -101,7 +100,6 @@ def from_risk_table_to_min_capital( idm=IDM_ASSUMED, instrument_weight=INSTRUMENT_WEIGHT_ASSUMED, ) -> pd.DataFrame: - base_multiplier = instrument_risk_table.point_size_base price = instrument_risk_table.price ann_perc_stdev = instrument_risk_table.annual_perc_stdev @@ -326,7 +324,6 @@ def get_pd_series_of_risk_by_asset_class( cmatrix: correlationEstimate, stdev: stdevEstimates, ) -> pd.Series: - unique_asset_classes = list(set(list(asset_classes.values()))) unique_asset_classes.sort() @@ -353,7 +350,6 @@ def get_risk_for_asset_class( cmatrix: correlationEstimate, stdev: stdevEstimates, ) -> float: - instruments_in_asset_class = [ instrument_code for instrument_code, instrument_asset_class in asset_classes.items() @@ -373,7 +369,6 @@ def get_risk_for_asset_class( def get_dict_of_asset_classes_for_instrument_list(data, instrument_list: list) -> dict: - diag_instruments = diagInstruments(data) asset_classes = dict( [ @@ -386,7 +381,6 @@ def get_dict_of_asset_classes_for_instrument_list(data, instrument_list: list) - def get_perc_of_capital_position_size_all_strategies(data) -> portfolioWeights: - instrument_list = get_instruments_with_positions_all_strategies(data) weights = portfolioWeights( [ @@ -460,7 +454,6 @@ def get_annualised_perc_of_capital_risk_of_positions_held_for_instruments_across def get_portfolio_risk_for_strategy(data, strategy_name): - weights = get_perc_of_capital_position_size_across_instruments_for_strategy( data, strategy_name ) @@ -476,7 +469,6 @@ def get_portfolio_risk_for_strategy(data, strategy_name): def get_perc_of_capital_position_size_across_instruments_for_strategy( data, strategy_name: str ) -> portfolioWeights: - instrument_list = get_instruments_with_positions(data, strategy_name) weights = portfolioWeights( [ @@ -592,7 +584,6 @@ def total_capital(data): def get_notional_exposure_in_base_currency_for_instrument( data, strategy_name, instrument_code ): - exposure_per_contract = get_exposure_per_contract_base_currency( data, instrument_code ) @@ -606,7 +597,6 @@ def get_notional_exposure_in_base_currency_for_instrument( def get_notional_exposure_in_base_currency_for_instrument_across_strategies( data, instrument_code ): - exposure_per_contract = get_exposure_per_contract_base_currency( data, instrument_code ) @@ -683,7 +673,6 @@ def get_asset_classes_for_instrument_list(data, instrument_codes: list) -> dict: def calculate_dict_of_beta_loadings_by_asset_class_given_weights( weights: portfolioWeights, dict_of_betas: dict, dict_of_asset_classes: dict ) -> dict: - dict_of_beta_loadings_per_instrument = ( calculate_dict_of_beta_loadings_per_instrument( dict_of_betas=dict_of_betas, weights=weights @@ -701,7 +690,6 @@ def calculate_dict_of_beta_loadings_by_asset_class_given_weights( def calculate_dict_of_beta_loadings_per_instrument( dict_of_betas: dict, weights: portfolioWeights ) -> dict: - list_of_instruments = dict_of_betas.keys() dict_of_beta_loadings_per_instrument = dict( @@ -717,7 +705,6 @@ def calculate_dict_of_beta_loadings_per_instrument( def calculate_beta_loadings_across_asset_classes( dict_of_asset_classes: dict, dict_of_beta_loadings_per_instrument: dict ) -> dict: - list_of_asset_classes = list(set(list(dict_of_asset_classes.values()))) beta_loadings_across_asset_classes = dict( [ @@ -741,7 +728,6 @@ def calculate_beta_loading_for_asset_class( dict_of_asset_classes: dict, dict_of_beta_loadings_per_instrument: dict, ) -> dict: - relevant_instruments = [ instrument_code for instrument_code, asset_class_for_instrument in dict_of_asset_classes.items() @@ -762,7 +748,6 @@ def calculate_beta_loading_for_asset_class( def get_beta_for_instrument_list( data: dataBlob, dict_of_asset_classes: dict, index_risk: float = arg_not_supplied ): - list_of_instruments = list(dict_of_asset_classes.keys()) perc_returns = last_years_perc_returns_for_list_of_instruments( data=data, list_of_instruments=list_of_instruments @@ -810,7 +795,6 @@ def get_equally_weighted_returns_across_asset_classes( perc_returns: pd.DataFrame, index_risk: float = arg_not_supplied, ) -> pd.DataFrame: - list_of_asset_classes = list(set(list(dict_of_asset_classes.values()))) results_as_list = [ @@ -844,7 +828,6 @@ def get_equally_weighted_returns_for_asset_class( perc_returns: pd.DataFrame, index_risk: float = arg_not_supplied, ) -> pd.Series: - instruments_in_asset_class = [ instrument for instrument, asset_class_for_instrument in dict_of_asset_classes.items() @@ -861,7 +844,6 @@ def get_equally_weighted_returns_for_asset_class( def calculate_equal_returns_to_avg_vol( perc_returns_for_asset_class: pd.DataFrame, index_risk: float = arg_not_supplied ) -> pd.Series: - std_by_instrument = perc_returns_for_asset_class.std(axis=0) perc_returns_for_asset_class_vol_norm = ( perc_returns_for_asset_class / std_by_instrument @@ -885,7 +867,6 @@ def dict_of_beta_by_instrument( perc_returns: pd.DataFrame, equally_weighted_returns_across_asset_classes: pd.DataFrame, ) -> dict: - list_of_instruments = list(set(list(dict_of_asset_classes.keys()))) dict_of_betas: Dict[str, float] = {} for instrument_code in list_of_instruments: @@ -906,7 +887,6 @@ def beta_for_instrument( perc_returns: pd.DataFrame, equally_weighted_returns_across_asset_classes: pd.DataFrame, ) -> Union[None, float]: - asset_class = dict_of_asset_classes[instrument_code] perc_returns_for_instrument = perc_returns[instrument_code] perc_returns_for_asset_class = equally_weighted_returns_across_asset_classes[ @@ -919,7 +899,6 @@ def beta_for_instrument( both_returns.columns = ["y", "x"] both_returns = both_returns.dropna() if not both_returns.empty: - reg_result = sm.ols(formula="y ~ x", data=both_returns).fit() beta = reg_result.params.x @@ -932,7 +911,6 @@ def dict_of_portfolio_beta_by_asset_class( weights: portfolioWeights, equally_weighted_returns_across_asset_classes: pd.DataFrame, ) -> dict: - list_of_asset_classes = list(set(list(dict_of_asset_classes.values()))) dict_of_portfolio_betas: Dict[str, float] = {} for asset_class in list_of_asset_classes: @@ -953,7 +931,6 @@ def portfolio_beta_for_asset_class( weights: portfolioWeights, equally_weighted_returns_across_asset_classes: pd.DataFrame, ) -> Union[None, float]: - perc_returns_for_portfolio = ( get_historical_portfolio_returns_with_fixed_current_weights( perc_returns=perc_returns, weights=weights @@ -969,7 +946,6 @@ def portfolio_beta_for_asset_class( both_returns.columns = ["y", "x"] both_returns = both_returns.dropna() if not both_returns.empty: - reg_result = sm.ols(formula="y ~ x", data=both_returns).fit() portfolio_beta = reg_result.params.x diff --git a/sysproduction/reporting/data/rolls.py b/sysproduction/reporting/data/rolls.py index 0b3ff1bd43..1ea3200e22 100644 --- a/sysproduction/reporting/data/rolls.py +++ b/sysproduction/reporting/data/rolls.py @@ -75,7 +75,6 @@ def get_roll_data_for_instrument(instrument_code, data): def relative_volume_in_forward_contract_versus_price( data: dataBlob, instrument_code: str ) -> float: - volumes = relative_volume_in_forward_contract_and_price(data, instrument_code) required_volume = volumes[1] if np.isnan(required_volume): @@ -87,7 +86,6 @@ def relative_volume_in_forward_contract_versus_price( def relative_volume_in_forward_contract_and_price( data: dataBlob, instrument_code: str ) -> list: - c_data = dataContracts(data) forward_contract_id = c_data.get_forward_contract_id(instrument_code) current_contract = c_data.get_priced_contract_id(instrument_code) @@ -101,7 +99,6 @@ def relative_volume_in_forward_contract_and_price( def volume_contracts_in_forward_contract(data: dataBlob, instrument_code: str) -> float: - c_data = dataContracts(data) forward_contract_id = c_data.get_forward_contract_id(instrument_code) v_data = diagVolumes(data) @@ -182,7 +179,6 @@ def updated_multiple_prices(self): def new_adjusted_prices(self): new_adjusted_prices = getattr(self, "_new_adjusted_prices", None) if new_adjusted_prices is None: - new_adjusted_prices = ( self._new_adjusted_prices ) = futuresAdjustedPrices.stitch_multiple_prices( @@ -340,7 +336,6 @@ def update_multiple_prices_on_roll( def get_final_matched_price_from_contract_object( data, contract_object, new_multiple_prices ): - diag_prices = diagPrices(data) price_series = diag_prices.get_merged_prices_for_contract_object( contract_object @@ -461,7 +456,6 @@ def last_price_data_with_matched_contracts(df_of_col_and_col_to_use): current_contract_to_find == final_contract_of_to_find and current_contract_to_infer_from == final_contract_of_to_infer_from ): - row_to_copy = df_of_col_and_col_to_use[ ["Price_to_find", "Price_infer_from"] ].iloc[data_row_idx] diff --git a/sysproduction/reporting/data/trades.py b/sysproduction/reporting/data/trades.py index 4ca3bf4232..d48309bf85 100644 --- a/sysproduction/reporting/data/trades.py +++ b/sysproduction/reporting/data/trades.py @@ -334,7 +334,6 @@ def vol_slippage_row(slippage_row, data): def vol_calculations_for_slippage_row(slippage_row, data): - last_annual_vol = get_last_annual_vol_for_slippage_row(slippage_row, data) input_items = [ @@ -362,7 +361,6 @@ def get_last_annual_vol_for_slippage_row(slippage_row, data): def get_stats_for_slippage_groups(df_to_process, item_list): results = {} for item_name in item_list: - sum_data = df_to_process.groupby(["strategy_name", "instrument_code"]).agg( {item_name: "sum"} ) @@ -414,7 +412,6 @@ def delay_row(order_row): def delay_calculations_for_order_row(order_row): - submit_minus_generated = delay_calc( order_row.parent_reference_datetime, order_row.submit_datetime ) diff --git a/sysproduction/reporting/data/volume.py b/sysproduction/reporting/data/volume.py index 115a068d9c..f40aa546fb 100644 --- a/sysproduction/reporting/data/volume.py +++ b/sysproduction/reporting/data/volume.py @@ -61,7 +61,6 @@ def get_average_daily_volume_for_contract_object( def get_best_average_daily_volume_for_instrument(data, instrument_code: str): - data_contracts = dataContracts(data) contract_dates = data_contracts.get_all_sampled_contracts(instrument_code) diff --git a/sysproduction/reporting/duplicate_market_report.py b/sysproduction/reporting/duplicate_market_report.py index ff6bbf15e0..3cac66d7e1 100644 --- a/sysproduction/reporting/duplicate_market_report.py +++ b/sysproduction/reporting/duplicate_market_report.py @@ -12,7 +12,6 @@ def duplicate_market_report( data: dataBlob = arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/formatting.py b/sysproduction/reporting/formatting.py index 9e114a7174..8a32da1b10 100644 --- a/sysproduction/reporting/formatting.py +++ b/sysproduction/reporting/formatting.py @@ -14,7 +14,6 @@ def make_account_curve_plot( start_date: datetime.datetime = arg_not_supplied, end_date: datetime.datetime = arg_not_supplied, ): - curve_to_plot = daily_pandl.resample("1B").sum() if start_date is not arg_not_supplied: curve_to_plot = curve_to_plot[start_date:] @@ -45,7 +44,6 @@ def make_account_curve_plot_from_df( end_date: datetime.datetime = arg_not_supplied, title_style: dict = None, ): - curve_to_plot = pandl_df.resample("1B").sum() if start_date is not arg_not_supplied: curve_to_plot = curve_to_plot[start_date:] diff --git a/sysproduction/reporting/instrument_risk_report.py b/sysproduction/reporting/instrument_risk_report.py index 41a393699b..67f3d48f0a 100644 --- a/sysproduction/reporting/instrument_risk_report.py +++ b/sysproduction/reporting/instrument_risk_report.py @@ -20,7 +20,6 @@ def instrument_risk_report( data: dataBlob = arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/market_monitor_report.py b/sysproduction/reporting/market_monitor_report.py index c1ff4381e8..c146dd52d6 100644 --- a/sysproduction/reporting/market_monitor_report.py +++ b/sysproduction/reporting/market_monitor_report.py @@ -30,7 +30,6 @@ def market_monitor_report( start_date=arg_not_supplied, end_date=arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() @@ -72,7 +71,6 @@ def _market_monitor_with_dates(reporting_api: reportingApi) -> list: def _market_monitor_report_full(reporting_api: reportingApi) -> list: - formatted_output = [] formatted_output.append(reporting_api.terse_header("Market monitor report")) formatted_output.append(MARKET_REPORT) diff --git a/sysproduction/reporting/minimum_capital_report.py b/sysproduction/reporting/minimum_capital_report.py index 9dac54d8df..340d9c8c88 100644 --- a/sysproduction/reporting/minimum_capital_report.py +++ b/sysproduction/reporting/minimum_capital_report.py @@ -22,7 +22,6 @@ def minimum_capital_report( data: dataBlob = arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/remove_markets_report.py b/sysproduction/reporting/remove_markets_report.py index cc4759ce1b..191c5b09f1 100644 --- a/sysproduction/reporting/remove_markets_report.py +++ b/sysproduction/reporting/remove_markets_report.py @@ -12,7 +12,6 @@ def remove_markets_report( data: dataBlob = arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/reporting_functions.py b/sysproduction/reporting/reporting_functions.py index b08516da20..82607834ff 100644 --- a/sysproduction/reporting/reporting_functions.py +++ b/sysproduction/reporting/reporting_functions.py @@ -34,7 +34,6 @@ class ParsedReport(object): def __init__( self, text: str = arg_not_supplied, pdf_filename: str = arg_not_supplied ): - self._text = text self._pdf_filename = pdf_filename @@ -80,7 +79,6 @@ def run_report_with_data_blob(report_config: reportConfig, data: dataBlob): def run_report_from_config(report_config: reportConfig, data: dataBlob) -> list: - report_function = resolve_function(report_config.function) report_kwargs = report_config.kwargs @@ -206,7 +204,6 @@ def pandas_display_for_reports(): def output_report( data: dataBlob, report_config: reportConfig, parsed_report: ParsedReport ): - output = report_config.output # We either print or email or send to file or ... @@ -246,7 +243,6 @@ def display_pdf_report(parsed_report: ParsedReport): def email_report( parsed_report: ParsedReport, report_config: reportConfig, data: dataBlob ): - if parsed_report.contains_pdf: send_production_mail_msg_attachment( body="Report attached", diff --git a/sysproduction/reporting/slippage_report.py b/sysproduction/reporting/slippage_report.py index 00deaa9f42..00a1dcc85b 100644 --- a/sysproduction/reporting/slippage_report.py +++ b/sysproduction/reporting/slippage_report.py @@ -29,7 +29,6 @@ def slippage_report( end_date: datetime.datetime = arg_not_supplied, start_date: datetime.datetime = arg_not_supplied, ): - if data is arg_not_supplied: data = dataBlob() diff --git a/sysproduction/reporting/strategies_report.py b/sysproduction/reporting/strategies_report.py index ad3f5b26e2..85de750974 100644 --- a/sysproduction/reporting/strategies_report.py +++ b/sysproduction/reporting/strategies_report.py @@ -20,7 +20,6 @@ def strategy_report( data=arg_not_supplied, timestamp=arg_not_supplied, strategy_name=ALL_STRATEGIES ): - if data is arg_not_supplied: data = dataBlob() @@ -41,7 +40,6 @@ def strategy_report( def get_strategies_report_output(data, list_of_strategies, timestamp=arg_not_supplied): - formatted_output = [] for strategy_name in list_of_strategies: try: diff --git a/sysproduction/strategy_code/report_system_classic.py b/sysproduction/strategy_code/report_system_classic.py index ab01f09db2..18c2a25239 100644 --- a/sysproduction/strategy_code/report_system_classic.py +++ b/sysproduction/strategy_code/report_system_classic.py @@ -363,7 +363,6 @@ def get_forecast_matrix_over_code( def get_stage_breakdown_over_codes(backtest: interactiveBacktest, method_list: list): - value_dict = {} for config_for_method in method_list: value_dict[ diff --git a/sysproduction/strategy_code/report_system_dynamic_optimised.py b/sysproduction/strategy_code/report_system_dynamic_optimised.py index e9fa152d8e..ab6b0f5875 100644 --- a/sysproduction/strategy_code/report_system_dynamic_optimised.py +++ b/sysproduction/strategy_code/report_system_dynamic_optimised.py @@ -11,7 +11,6 @@ def report_system_dynamic(data: dataBlob, backtest: interactiveBacktest): - format_output = [] strategy_name = backtest.strategy_name @@ -41,7 +40,6 @@ def report_system_dynamic(data: dataBlob, backtest: interactiveBacktest): def get_optimal_positions_table_as_df( data: dataBlob, strategy_name: str ) -> pd.DataFrame: - data_optimal_positions = dataOptimalPositions(data) list_of_positions = ( diff --git a/sysproduction/strategy_code/run_dynamic_optimised_system.py b/sysproduction/strategy_code/run_dynamic_optimised_system.py index fe65a34787..7b266f69a7 100644 --- a/sysproduction/strategy_code/run_dynamic_optimised_system.py +++ b/sysproduction/strategy_code/run_dynamic_optimised_system.py @@ -22,7 +22,6 @@ class runSystemCarryTrendDynamic(runSystemClassic): - # DO NOT CHANGE THE NAME OF THIS FUNCTION; IT IS HARDCODED INTO CONFIGURATION FILES # BECAUSE IT IS ALSO USED TO LOAD BACKTESTS def system_method( @@ -55,7 +54,6 @@ def dynamic_system( notional_trading_capital: float = arg_not_supplied, base_currency: str = arg_not_supplied, ) -> System: - sim_data = get_sim_data_object_for_production(data) config = Config(config_filename) @@ -89,7 +87,6 @@ def dynamic_system( def futures_system(data, config): - system = System( [ Risk(), @@ -138,7 +135,6 @@ def updated_optimal_positions_for_dynamic_system( def construct_optimal_position_entry( data: dataBlob, system: System, instrument_code: str ) -> optimalPositionWithReference: - diag_contracts = dataContracts(data) optimal_position = get_optimal_position_from_system(system, instrument_code) @@ -158,7 +154,6 @@ def construct_optimal_position_entry( def get_optimal_position_from_system(system: System, instrument_code: str) -> float: - optimal_position = system.portfolio.get_notional_position(instrument_code) return float(optimal_position.iloc[-1]) diff --git a/sysproduction/strategy_code/run_system_classic.py b/sysproduction/strategy_code/run_system_classic.py index 9972c0d8f5..25e677c1d0 100644 --- a/sysproduction/strategy_code/run_system_classic.py +++ b/sysproduction/strategy_code/run_system_classic.py @@ -41,7 +41,6 @@ def __init__( strategy_name: str, backtest_config_filename=arg_not_supplied, ): - if backtest_config_filename is arg_not_supplied: raise Exception("Need to supply config filename") @@ -128,7 +127,6 @@ def production_classic_futures_system( notional_trading_capital: float = arg_not_supplied, base_currency: str = arg_not_supplied, ) -> System: - sim_data = get_sim_data_object_for_production(data) config = Config(config_filename) @@ -192,7 +190,6 @@ def construct_position_entry( lower_position: float, upper_position: float, ) -> bufferedOptimalPositions: - diag_contracts = dataContracts(data) reference_price = system.rawdata.get_daily_prices(instrument_code).iloc[-1] reference_contract = diag_contracts.get_priced_contract_id(instrument_code) diff --git a/sysproduction/update_historical_prices.py b/sysproduction/update_historical_prices.py index ef74b5a62d..cbaaf7baf5 100644 --- a/sysproduction/update_historical_prices.py +++ b/sysproduction/update_historical_prices.py @@ -337,7 +337,6 @@ def update_historical_prices_for_instrument_and_contract( cleaning_config: priceFilterConfig = arg_not_supplied, interactive_mode: bool = False, ): - diag_prices = diagPrices(data) intraday_frequency = diag_prices.get_intraday_frequency_for_historical_download() daily_frequency = DAILY_PRICE_FREQ @@ -433,7 +432,6 @@ def price_updating_or_errors( cleaning_config: priceFilterConfig, check_for_spike: bool = True, ): - price_updater = updatePrices(data) try: @@ -476,7 +474,6 @@ def report_price_spike(data: dataBlob, contract_object: futuresContract): def write_merged_prices_for_contract( data: dataBlob, contract_object: futuresContract, list_of_frequencies: list ): - ## note list of frequencies must have daily as last or groupby won't work with volume assert list_of_frequencies[-1] == DAILY_PRICE_FREQ diff --git a/sysproduction/update_multiple_adjusted_prices.py b/sysproduction/update_multiple_adjusted_prices.py index c27192070f..d053f1dcc2 100644 --- a/sysproduction/update_multiple_adjusted_prices.py +++ b/sysproduction/update_multiple_adjusted_prices.py @@ -151,7 +151,6 @@ def calc_updated_multiple_prices( def calc_update_adjusted_prices( data: dataBlob, instrument_code: str, updated_multiple_prices: futuresMultiplePrices ) -> futuresAdjustedPrices: - diag_prices = diagPrices(data) existing_adjusted_prices = diag_prices.get_adjusted_prices(instrument_code) @@ -210,7 +209,6 @@ def update_with_new_prices( updated_multiple_prices: futuresMultiplePrices, updated_adjusted_prices: futuresAdjustedPrices, ): - update_prices = updatePrices(data) update_prices.add_multiple_prices( diff --git a/sysproduction/update_sampled_contracts.py b/sysproduction/update_sampled_contracts.py index 2aeec70cf7..7cefed681d 100644 --- a/sysproduction/update_sampled_contracts.py +++ b/sysproduction/update_sampled_contracts.py @@ -105,7 +105,6 @@ def update_active_contracts_for_instrument(instrument_code: str, data: dataBlob) def get_contract_chain(data: dataBlob, instrument_code: str) -> listOfFuturesContracts: - furthest_out_contract = get_furthest_out_contract_with_roll_parameters( data, instrument_code ) @@ -119,7 +118,6 @@ def get_contract_chain(data: dataBlob, instrument_code: str) -> listOfFuturesCon def get_furthest_out_contract_with_roll_parameters( data: dataBlob, instrument_code: str ) -> contractDateWithRollParameters: - furthest_out_contract_date = get_furthest_out_contract_date(data, instrument_code) furthest_out_contract = ( create_furthest_out_contract_with_roll_parameters_from_contract_date( @@ -131,7 +129,6 @@ def get_furthest_out_contract_with_roll_parameters( def get_furthest_out_contract_date(data: dataBlob, instrument_code: str) -> str: - diag_prices = diagPrices(data) # Get the last contract currently being used @@ -145,7 +142,6 @@ def get_furthest_out_contract_date(data: dataBlob, instrument_code: str) -> str: def create_furthest_out_contract_with_roll_parameters_from_contract_date( data: dataBlob, instrument_code: str, furthest_out_contract_date: str ): - diag_contracts = dataContracts(data) roll_parameters = diag_contracts.get_roll_parameters(instrument_code) @@ -159,7 +155,6 @@ def create_furthest_out_contract_with_roll_parameters_from_contract_date( def create_contract_object_chain( furthest_out_contract: contractDateWithRollParameters, instrument_code: str ) -> listOfFuturesContracts: - contract_date_chain = create_contract_date_chain(furthest_out_contract) contract_object_chain = create_contract_object_chain_from_contract_date_chain( instrument_code, contract_date_chain @@ -185,7 +180,6 @@ def create_contract_date_chain( def create_contract_object_chain_from_contract_date_chain( instrument_code: str, contract_date_chain: list ) -> listOfFuturesContracts: - # We have a list of contract_date objects, need futureContracts # create a 'bare' instrument object instrument_object = futuresInstrument(instrument_code) @@ -411,7 +405,6 @@ def update_contract_object_with_new_expiry_date( def check_key_contracts_have_not_expired(instrument_code: str, data: dataBlob): - key_contract_ids = get_list_of_key_contract_ids( instrument_code=instrument_code, data=data ) diff --git a/sysproduction/update_strategy_capital.py b/sysproduction/update_strategy_capital.py index c6ef7a2068..f207ec61d2 100644 --- a/sysproduction/update_strategy_capital.py +++ b/sysproduction/update_strategy_capital.py @@ -93,7 +93,6 @@ def get_total_current_margin(data: dataBlob) -> float: def call_allocation_function(data: dataBlob, capital_to_allocate: float) -> dict: - strategy_allocation_config_dict = get_strategy_allocation_config_dict(data) strategy_allocation_function_str = strategy_allocation_config_dict.pop("function") diff --git a/sysproduction/update_strategy_orders.py b/sysproduction/update_strategy_orders.py index 2463cb7934..5a53e03b1b 100644 --- a/sysproduction/update_strategy_orders.py +++ b/sysproduction/update_strategy_orders.py @@ -13,7 +13,6 @@ def update_strategy_orders(): ## function if called from script with dataBlob(log_name="Update-Strategy-Orders") as data: - list_of_strategies = get_list_of_strategies_for_process(data, process_name) ALL = "ALL" print("Which strategy?") diff --git a/sysquant/estimators/clustering_correlations.py b/sysquant/estimators/clustering_correlations.py index 26237ed13b..3de34934ea 100644 --- a/sysquant/estimators/clustering_correlations.py +++ b/sysquant/estimators/clustering_correlations.py @@ -14,7 +14,6 @@ def assets_in_cluster_order(corr_matrix: correlationEstimate, cluster_size: int def cluster_correlation_matrix(corr_matrix: correlationEstimate, cluster_size: int = 2): - if corr_matrix.is_boring: # Boring correlation will break if we try and cluster corr_as_np = corr_matrix.values @@ -92,7 +91,6 @@ def arbitrary_split_for_asset_length(count_assets: int, cluster_size: int = 2) - def from_cluster_index_to_asset_names( clusters: list, corr_matrix: correlationEstimate ) -> list: - all_clusters = list(set(clusters)) asset_names = corr_matrix.columns list_of_asset_clusters = [ @@ -106,7 +104,6 @@ def from_cluster_index_to_asset_names( def get_asset_names_for_cluster_index( cluster_id: int, clusters: list, asset_names: list ): - list_of_assets = [ asset for asset, cluster in zip(asset_names, clusters) if cluster == cluster_id ] diff --git a/sysquant/estimators/correlation_estimator.py b/sysquant/estimators/correlation_estimator.py index 240ef7ce13..dad39e735e 100644 --- a/sysquant/estimators/correlation_estimator.py +++ b/sysquant/estimators/correlation_estimator.py @@ -56,7 +56,6 @@ def correlation_estimator_for_subperiod( shrinkage: float = 0.0, **_ignored_kwargs, ): - subperiod_data = data_for_correlation[fit_period.fit_start : fit_period.fit_end] corr_matrix_values = subperiod_data.corr() diff --git a/sysquant/estimators/correlation_over_time.py b/sysquant/estimators/correlation_over_time.py index fd3ac4cb25..b13f8055da 100644 --- a/sysquant/estimators/correlation_over_time.py +++ b/sysquant/estimators/correlation_over_time.py @@ -12,7 +12,6 @@ def correlation_over_time_for_returns( forward_fill_price_index=True, **kwargs, ) -> CorrelationList: - index_prices_for_correlation = returns_for_correlation.cumsum() if forward_fill_price_index: index_prices_for_correlation = index_prices_for_correlation.ffill() @@ -34,7 +33,6 @@ def correlation_over_time( interval_frequency: str = "12M", **kwargs, ) -> CorrelationList: - column_names = list(data_for_correlation.columns) # Generate time periods diff --git a/sysquant/estimators/correlations.py b/sysquant/estimators/correlations.py index fd6a3049a5..3a3d7244a8 100644 --- a/sysquant/estimators/correlations.py +++ b/sysquant/estimators/correlations.py @@ -82,7 +82,6 @@ def shrink_to_offdiag(self, offdiag=0.0, shrinkage_corr: float = 1.0): return self.shrink(prior_corr=prior_corr, shrinkage_corr=shrinkage_corr) def shrink(self, prior_corr: "correlationEstimate", shrinkage_corr: float = 1.0): - if shrinkage_corr == 1.0: return prior_corr @@ -128,14 +127,12 @@ def clean_corr_matrix_given_data( return clean_correlation def clean_correlations(self, must_haves: list = arg_not_supplied, offdiag=0.99): - # means we can use earlier correlations with sensible values cleaned_corr_matrix = clean_correlation(self, must_haves, offdiag=offdiag) return cleaned_corr_matrix def boring_corr_matrix(self, offdiag: float = 0.99, diag: float = 1.0): - return create_boring_corr_matrix( self.size, offdiag=offdiag, diag=diag, columns=self.columns ) @@ -285,7 +282,6 @@ def create_boring_corr_matrix( def boring_corr_matrix_values( size: int, offdiag: float = 0.99, diag: float = 1.0 ) -> np.array: - size_index = range(size) def _od(i, j, offdiag, diag): @@ -464,7 +460,6 @@ def modify_correlation( shrinkage: float = 0.0, clip=arg_not_supplied, ): - if floor_at_zero: corr_matrix = corr_matrix.floor_correlation_matrix(floor=0.0) diff --git a/sysquant/estimators/covariance.py b/sysquant/estimators/covariance.py index 6157a43b29..03fa2e8fc6 100644 --- a/sysquant/estimators/covariance.py +++ b/sysquant/estimators/covariance.py @@ -27,7 +27,6 @@ def assets_with_missing_data(self) -> list: def covariance_from_stdev_and_correlation( correlation_estimate: correlationEstimate, stdev_estimate: stdevEstimates ) -> covarianceEstimate: - all_assets = set(list(correlation_estimate.columns) + stdev_estimate.list_of_keys()) list_of_assets_with_data = list( set(correlation_estimate.assets_with_data()).intersection( @@ -50,7 +49,6 @@ def covariance_from_stdev_and_correlation( def get_annualised_risk( std_dev: stdevEstimates, cmatrix: correlationEstimate, weights: portfolioWeights ) -> float: - weights_as_np = weights.as_np() std_dev_as_np = std_dev.as_np() cmatrix_as_np = cmatrix.as_np() @@ -67,7 +65,6 @@ def get_annualised_risk( def clean_values(std_dev: np.array, cmatrix: np.array, weights: np.array): - cmatrix[np.isnan(cmatrix)] = 1.0 weights[np.isnan(weights)] = 0.0 std_dev[np.isnan(std_dev)] = 100.0 diff --git a/sysquant/estimators/diversification_multipliers.py b/sysquant/estimators/diversification_multipliers.py index ddfe23f3b3..6c91f893ff 100644 --- a/sysquant/estimators/diversification_multipliers.py +++ b/sysquant/estimators/diversification_multipliers.py @@ -41,8 +41,7 @@ def diversification_multiplier_from_list( # here's where we stack up the answers div_mult_vector = [] - for (corrmatrix, start_of_period) in zip(correlation_list.corr_list, ref_periods): - + for corrmatrix, start_of_period in zip(correlation_list.corr_list, ref_periods): weight_slice = weight_df_aligned[:start_of_period] if weight_slice.shape[0] == 0: # empty space diff --git a/sysquant/estimators/estimates.py b/sysquant/estimators/estimates.py index cae25dc8f1..bab618ffa9 100644 --- a/sysquant/estimators/estimates.py +++ b/sysquant/estimators/estimates.py @@ -61,7 +61,6 @@ def equalise_estimates( ann_target_SR: float = 0.5, equalise_vols: bool = True, ): - return equalise_estimates( self, equalise_SR=equalise_SR, @@ -114,7 +113,6 @@ def equalise_estimates( ann_target_SR: float = 0.5, equalise_vols: bool = True, ) -> Estimates: - list_of_asset_names = estimates.asset_names mean_list = estimates.mean_list stdev_list = estimates.stdev_list @@ -156,7 +154,6 @@ def equalise_estimates_from_lists( ann_target_SR: float = 0.5, equalise_vols: bool = True, ) -> list: - equalise_vols = str2Bool(equalise_vols) equalise_SR = str2Bool(equalise_SR) @@ -213,7 +210,6 @@ def vol_equaliser(mean_list, stdev_list): def shrink_means_to_SR( estimates: Estimates, shrinkage_SR: float = 1.0, target_SR=0.5 ) -> meanEstimates: - list_of_asset_names = estimates.asset_names mean_list = estimates.mean_list stdev_list = estimates.stdev_list diff --git a/sysquant/estimators/exponential_correlation.py b/sysquant/estimators/exponential_correlation.py index 5631016776..9102ba9ca6 100644 --- a/sysquant/estimators/exponential_correlation.py +++ b/sysquant/estimators/exponential_correlation.py @@ -24,7 +24,6 @@ def __init__( offdiag: float = 0.99, **_ignored_kwargs, ): - super().__init__( data_for_correlation, ew_lookback=ew_lookback, @@ -44,7 +43,6 @@ def perform_calculations( adjusted_min_periods=20, **other_kwargs, ): - correlation_calculations = exponentialCorrelationResults( data_for_correlation, ew_lookback=adjusted_lookback, @@ -85,7 +83,6 @@ def missing_data(self): def get_estimate_for_fitperiod_with_data( self, fit_period: fitDates = arg_not_supplied ) -> correlationEstimate: - if fit_period is arg_not_supplied: fit_period = self._get_default_fit_period_cover_all_data() @@ -148,13 +145,12 @@ def __init__( min_periods: int = 20, **_ignored_kwargs, ): - columns = data_for_correlation.columns self._columns = columns raw_correlations = data_for_correlation.ewm( span=ew_lookback, min_periods=min_periods, ignore_na=True - ).corr(pairwise=True, ignore_na=True) + ).corr(pairwise=True) self._raw_correlations = raw_correlations @@ -184,7 +180,6 @@ def columns(self) -> list: def last_valid_cor_matrix_for_date( raw_correlations: pd.DataFrame, columns: list, date_point: datetime.datetime ) -> correlationEstimate: - size_of_matrix = len(columns) corr_matrix_values = ( raw_correlations[raw_correlations.index.get_level_values(0) < date_point] diff --git a/sysquant/estimators/generic_estimator.py b/sysquant/estimators/generic_estimator.py index 96b088e302..489f78a5d7 100644 --- a/sysquant/estimators/generic_estimator.py +++ b/sysquant/estimators/generic_estimator.py @@ -36,7 +36,6 @@ def __init__( length_adjustment: int = 1, **other_kwargs, ): - adjusted_lookback = ew_lookback * length_adjustment adjusted_min_periods = min_periods * length_adjustment diff --git a/sysquant/estimators/mean_estimator.py b/sysquant/estimators/mean_estimator.py index aab835998d..c96bed527c 100644 --- a/sysquant/estimators/mean_estimator.py +++ b/sysquant/estimators/mean_estimator.py @@ -41,7 +41,6 @@ def __init__( frequency: str = "W", **_ignored_kwargs, ): - super().__init__( data_for_mean, ew_lookback=ew_lookback, @@ -62,7 +61,6 @@ def perform_calculations( adjusted_min_periods=20, **_other_kwargs, ) -> pd.DataFrame: - mean_calculations = exponential_mean( data, ew_lookback=adjusted_lookback, min_periods=adjusted_min_periods ) @@ -89,7 +87,6 @@ def get_estimate_for_fitperiod_with_data( def exponential_mean( data_for_mean: pd.DataFrame, ew_lookback: int = 250, min_periods: int = 20 ) -> pd.DataFrame: - exponential_mean = data_for_mean.ewm( span=ew_lookback, min_periods=min_periods ).mean() @@ -105,7 +102,6 @@ def __init__( frequency: str = "W", **kwargs, ): - super().__init__(data_for_mean, using_exponent=using_exponent, **kwargs) def calculate_estimate_normally(self, fit_period: fitDates) -> meanEstimates: @@ -172,7 +168,6 @@ def empty_mean(data_for_mean: pd.DataFrame) -> meanEstimates: def annualise_mean_estimate(mean: meanEstimates, frequency: str) -> meanEstimates: - return meanEstimates( [ (asset_name, annualised_mean(mean_value, frequency=frequency)) diff --git a/sysquant/estimators/pooled_correlation.py b/sysquant/estimators/pooled_correlation.py index 3502f76e54..2a93ca51c8 100644 --- a/sysquant/estimators/pooled_correlation.py +++ b/sysquant/estimators/pooled_correlation.py @@ -13,7 +13,6 @@ def pooled_correlation_estimator( data: listOfDataFrames, frequency="W", forward_fill_data=True, **kwargs ) -> CorrelationList: - copied_data = copy(data) if forward_fill_data: # NOTE if we're not pooling passes a list of one diff --git a/sysquant/estimators/stdev_estimator.py b/sysquant/estimators/stdev_estimator.py index a117175b5c..e8eb61d1a1 100644 --- a/sysquant/estimators/stdev_estimator.py +++ b/sysquant/estimators/stdev_estimator.py @@ -76,7 +76,6 @@ def __init__( frequency: str = "W", **_ignored_kwargs, ): - super().__init__( data_for_stdev, ew_lookback=ew_lookback, @@ -97,7 +96,6 @@ def perform_calculations( adjusted_min_periods=20, **other_kwargs, ) -> pd.DataFrame: - stdev_calculations = exponential_std_deviation( data_for_stdev, ew_lookback=adjusted_lookback, @@ -129,7 +127,6 @@ def exponential_std_deviation( min_periods: int = 20, **_ignored_kwargs, ) -> pd.DataFrame: - exponential_stdev = data_for_stdev.ewm( span=ew_lookback, min_periods=min_periods ).std() diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index e2556bb3b8..fdb1cb597b 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -185,7 +185,6 @@ def mixed_vol_calc( def simple_ewvol_calc( daily_returns: pd.Series, days: int = 35, min_periods: int = 10, **ignored_kwargs ) -> pd.Series: - # Standard deviation will be nan for first 10 non nan values vol = daily_returns.ewm(adjust=True, span=days, min_periods=min_periods).std() @@ -195,7 +194,6 @@ def simple_ewvol_calc( def simple_vol_calc( daily_returns: pd.Series, days: int = 25, min_periods: int = 10, **ignored_kwargs ) -> pd.Series: - # Standard deviation will be nan for first 10 non nan values vol = daily_returns.rolling(days, min_periods=min_periods).std() diff --git a/sysquant/fitting_dates.py b/sysquant/fitting_dates.py index f40a6249f7..3764f909e3 100644 --- a/sysquant/fitting_dates.py +++ b/sysquant/fitting_dates.py @@ -151,7 +151,6 @@ def _get_start_and_end_date(data): def _in_sample_dates(start_date: datetime.datetime, end_date: datetime.datetime): - return listOfFittingDates([fitDates(start_date, end_date, start_date, end_date)]) @@ -186,7 +185,6 @@ def _fit_dates_for_period_index( date_method: str = "expanding", rollyears=20, ): - period_start = list_of_starting_dates_per_period[period_index] period_end = list_of_starting_dates_per_period[period_index + 1] diff --git a/sysquant/optimisation/SR_adjustment.py b/sysquant/optimisation/SR_adjustment.py index 6ac1776832..b5f952cec5 100644 --- a/sysquant/optimisation/SR_adjustment.py +++ b/sysquant/optimisation/SR_adjustment.py @@ -12,7 +12,6 @@ def adjust_dataframe_of_weights_for_SR_costs( weights: pd.DataFrame, costs_dict: dict ) -> pd.DataFrame: - asset_names = list(weights.columns) SR_list = [costs_dict[asset] for asset in asset_names] @@ -38,7 +37,6 @@ def adjust_dataframe_of_weights_for_SR( avg_correlation: float = 0.5, years_of_data: float = 10, ) -> pd.DataFrame: - list_of_weight_lists = [ list(weights.iloc[idx].values) for idx in range(len(weights.index)) ] @@ -63,7 +61,6 @@ def adjust_list_of_weight_lists_for_SR( avg_correlation: float = 0.5, years_of_data: float = 10, ) -> list: - adj_weight_list = [ adjust_weights_for_SR( weights_as_list, @@ -80,7 +77,6 @@ def adjust_list_of_weight_lists_for_SR( def adjust_weights_for_SR( weights_as_list: list, SR_list: list, avg_correlation: float, years_of_data: float ) -> list: - if len(weights_as_list) == 1: return weights_as_list @@ -222,7 +218,6 @@ def calculate_confident_mean_difference( confidence_interval: float, avg_correlation: float, ) -> float: - omega_difference = calculate_omega_difference(std, years_of_data, avg_correlation) confident_mean_difference = stats.norm(mean_difference, omega_difference).ppf( confidence_interval @@ -234,7 +229,6 @@ def calculate_confident_mean_difference( def calculate_omega_difference( std: float, years_of_data: float, avg_correlation: float ): - omega_one_asset = std / (years_of_data) ** 0.5 omega_variance_difference = 2 * (omega_one_asset**2) * (1 - avg_correlation) omega_difference = omega_variance_difference**0.5 diff --git a/sysquant/optimisation/cleaning.py b/sysquant/optimisation/cleaning.py index 19215e6761..3fed445aba 100644 --- a/sysquant/optimisation/cleaning.py +++ b/sysquant/optimisation/cleaning.py @@ -20,7 +20,6 @@ def get_must_have_dict_from_data(data: pd.DataFrame) -> dict: def clean_weights( weights: portfolioWeights, must_haves: dict, fraction: float = 0.5 ) -> portfolioWeights: - ( asset_names, list_of_weights, @@ -114,7 +113,6 @@ def clean_list_of_weights( def _good_weight( value, idx, needs_replacing, keep_empty, each_missing_weight, adjustment_on_rest ): - if needs_replacing[idx]: return each_missing_weight if keep_empty[idx]: diff --git a/sysquant/optimisation/full_handcrafting.py b/sysquant/optimisation/full_handcrafting.py index 076f815825..8ac07440a4 100644 --- a/sysquant/optimisation/full_handcrafting.py +++ b/sysquant/optimisation/full_handcrafting.py @@ -41,7 +41,6 @@ def __init__(self): pass def __repr__(self): - return "%s \n %s " % (self.calcs, self.description) @@ -1338,7 +1337,6 @@ def cluster_correlation_matrix(corr_matrix: np.array, max_cluster_size=3) -> lis def optimise(sigma, mean_list): - # will replace nans with big negatives mean_list = fix_mus(mean_list) diff --git a/sysquant/optimisation/generic_optimiser.py b/sysquant/optimisation/generic_optimiser.py index acaf06cfe5..8b4a9cf7e6 100644 --- a/sysquant/optimisation/generic_optimiser.py +++ b/sysquant/optimisation/generic_optimiser.py @@ -14,7 +14,6 @@ def __init__( log=get_logger("optimiser"), **weighting_params, ): - net_returns = returns_pre_processor.get_net_returns(asset_name) self._net_returns = net_returns diff --git a/sysquant/optimisation/optimise_over_time.py b/sysquant/optimisation/optimise_over_time.py index 825366410e..a1ba64fbfb 100644 --- a/sysquant/optimisation/optimise_over_time.py +++ b/sysquant/optimisation/optimise_over_time.py @@ -20,7 +20,6 @@ def __init__( log=get_logger("optimiser"), **kwargs, ): - # Generate time periods fit_dates = generate_fitting_dates( net_returns, date_method=date_method, rollyears=rollyears diff --git a/sysquant/optimisation/optimisers/call_optimiser.py b/sysquant/optimisation/optimisers/call_optimiser.py index e6f9c1133b..d427137db6 100644 --- a/sysquant/optimisation/optimisers/call_optimiser.py +++ b/sysquant/optimisation/optimisers/call_optimiser.py @@ -21,7 +21,6 @@ def optimiser_for_method( method: str, estimates: Estimates, **weighting_args ) -> estimatesWithPortfolioWeights: - assets_with_missing_data = estimates.assets_with_missing_data() estimates_with_only_valid_data = estimates.subset_with_available_data() if estimates_with_only_valid_data.size == 0: @@ -61,7 +60,6 @@ def weights_and_estimates_with_no_valid_data( def call_optimiser( method: str, estimates_with_only_valid_data: Estimates, **weighting_args ) -> estimatesWithPortfolioWeights: - optimisation_function = REGISTER_OF_OPTIMISERS.get(method, None) if optimisation_function is None: error_msg = "Optimiser %s not recognised" % method diff --git a/sysquant/optimisation/optimisers/equal_weights.py b/sysquant/optimisation/optimisers/equal_weights.py index 224883168e..9e1180f08c 100644 --- a/sysquant/optimisation/optimisers/equal_weights.py +++ b/sysquant/optimisation/optimisers/equal_weights.py @@ -8,7 +8,6 @@ def equal_weights_optimisation( estimates: Estimates, **_ignore_weighting_args ) -> estimatesWithPortfolioWeights: - portfolio_weights = one_over_n_portfolio_weights_from_estimates(estimates) estimates_with_weights = estimatesWithPortfolioWeights( weights=portfolio_weights, estimates=estimates diff --git a/sysquant/optimisation/optimisers/handcraft.py b/sysquant/optimisation/optimisers/handcraft.py index f051f5d93e..fae4ad2ad7 100644 --- a/sysquant/optimisation/optimisers/handcraft.py +++ b/sysquant/optimisation/optimisers/handcraft.py @@ -26,7 +26,6 @@ def handcraft_optimisation( equalise_vols: bool = True, **_ignored_weighting_kwargs, ) -> estimatesWithPortfolioWeights: - weights = get_handcrafted_portfolio_weights_for_valid_data( estimates, equalise_vols=equalise_vols, equalise_SR=equalise_SR ) @@ -41,7 +40,6 @@ def handcraft_optimisation( def get_handcrafted_portfolio_weights_for_valid_data( estimates: Estimates, equalise_vols: bool = True, equalise_SR: bool = False ) -> portfolioWeights: - handcraft_portfolio = handcraftPortfolio(estimates) risk_weights = handcraft_portfolio.risk_weights(equalise_SR=equalise_SR) @@ -57,7 +55,6 @@ def get_handcrafted_portfolio_weights_for_valid_data( class handcraftPortfolio(object): def __init__(self, estimates: Estimates): - self._estimates = estimates @property @@ -117,7 +114,6 @@ def risk_weights(self, equalise_SR: bool = False) -> portfolioWeights: return adjusted_weights def risk_weights_this_portfolio(self) -> portfolioWeights: - asset_names = self.asset_names raw_weights = one_over_n_weights_given_asset_names(asset_names) @@ -152,7 +148,6 @@ def subset(self, subset_of_asset_names: list): def adjust_weights_for_SR_on_handcrafted_portfolio( raw_weights: portfolioWeights, handcraft_portfolio: handcraftPortfolio ) -> portfolioWeights: - SR_list = handcraft_portfolio.sharpe_ratio avg_correlation = handcraft_portfolio.avg_correlation years_of_data = handcraft_portfolio.data_length_years @@ -178,7 +173,6 @@ def adjust_weights_for_SR_on_handcrafted_portfolio( def create_sub_portfolios_from_portfolio(handcraft_portfolio: handcraftPortfolio): - clusters_as_names = cluster_correlation_matrix(handcraft_portfolio.correlation) sub_portfolios = create_sub_portfolios_given_clusters( @@ -242,7 +236,6 @@ def multiplied_out_risk_weight_for_sub_portfolios( div_mult_for_portfolio: float = 1.0, weight_for_subportfolio: float = 0.5, ) -> portfolioWeights: - asset_names = list(weights_for_portfolio.keys()) mult_weights = portfolioWeights( [ diff --git a/sysquant/optimisation/optimisers/one_period.py b/sysquant/optimisation/optimisers/one_period.py index 703a1dc141..98f6be05b0 100644 --- a/sysquant/optimisation/optimisers/one_period.py +++ b/sysquant/optimisation/optimisers/one_period.py @@ -6,7 +6,6 @@ def one_period_optimisation( estimates: Estimates, **weighting_kwargs ) -> estimatesWithPortfolioWeights: - estimates_with_portfolio_weights = optimise_given_estimates( estimates=estimates, **weighting_kwargs ) diff --git a/sysquant/optimisation/optimisers/shrinkage.py b/sysquant/optimisation/optimisers/shrinkage.py index 26d23c3213..50536d0708 100644 --- a/sysquant/optimisation/optimisers/shrinkage.py +++ b/sysquant/optimisation/optimisers/shrinkage.py @@ -11,7 +11,6 @@ def shrinkage_optimisation( ann_target_SR=0.5, **weighting_kwargs, ) -> estimatesWithPortfolioWeights: - estimates = estimates.shrink_correlation_to_average(shrinkage_corr) estimates = estimates.shrink_means_to_SR( shrinkage_SR=shrinkage_SR, ann_target_SR=ann_target_SR diff --git a/sysquant/optimisation/portfolio_optimiser.py b/sysquant/optimisation/portfolio_optimiser.py index 3214b81410..c2cf95112c 100644 --- a/sysquant/optimisation/portfolio_optimiser.py +++ b/sysquant/optimisation/portfolio_optimiser.py @@ -31,7 +31,6 @@ def __init__( method="handcraft", **weighting_args, ): - self._net_returns = net_returns self._log = log self._weighting_args = weighting_args @@ -62,7 +61,6 @@ def cleaning(self) -> bool: return str2Bool(self.weighting_args["cleaning"]) def calculate_weights_for_period(self, fit_period: fitDates) -> portfolioWeights: - if fit_period.no_data: return one_over_n_weights_given_data(self.net_returns) @@ -76,7 +74,6 @@ def calculate_weights_for_period(self, fit_period: fitDates) -> portfolioWeights def clean_weights_for_period( self, weights: portfolioWeights, fit_period: fitDates ) -> portfolioWeights: - if fit_period.no_data: return weights @@ -88,7 +85,6 @@ def clean_weights_for_period( return cleaned_weights def calculate_weights_given_data(self, fit_period: fitDates) -> portfolioWeights: - estimates_and_portfolio_weights = ( self.get_weights_and_returned_estimates_for_period(fit_period) ) @@ -99,7 +95,6 @@ def calculate_weights_given_data(self, fit_period: fitDates) -> portfolioWeights def get_weights_and_returned_estimates_for_period( self, fit_period: fitDates ) -> estimatesWithPortfolioWeights: - method = self.method weighting_args = self._weighting_args diff --git a/sysquant/optimisation/pre_processing.py b/sysquant/optimisation/pre_processing.py index 0f08e58b15..b6c7a3165b 100644 --- a/sysquant/optimisation/pre_processing.py +++ b/sysquant/optimisation/pre_processing.py @@ -25,7 +25,6 @@ def __init__( cost_multiplier: float = 1.0, **_ignored_kwargs, ): - self._dict_of_returns = dict_of_returns self._frequency = frequency @@ -129,7 +128,6 @@ def get_gross_returns_for_asset_name( def get_gross_returns_for_asset_name_before_equalisation( self, asset_name: str ) -> dictOfReturnsForOptimisation: - if self.pool_gross_returns: return self.get_pooled_gross_returns_dict() else: @@ -219,7 +217,6 @@ def get_dict_of_cost_dicts_by_asset_name(self) -> dictOfSRacrossAssets: def _calculate_pooled_turnover_costs( asset_name: str, turnovers: dict, dict_of_costs: dictOfSRacrossAssets ) -> dictOfSR: - column_names = turnovers.keys() column_SR_dict = dict( [ @@ -244,7 +241,6 @@ def _calculate_pooled_turnover_costs( def _calculate_pooled_turnover_cost_for_column( asset_name: str, turnovers: dict, dict_of_costs: dict, column_name ) -> float: - cost_per_turnover_this_asset = _calculate_cost_per_turnover( asset_name, column_name=column_name, @@ -265,7 +261,6 @@ def _average_turnover(turnovers, column_name): def _calculate_cost_per_turnover( asset_name: str, column_name: str, turnovers: dict, dict_of_costs: dict ): - turnover = _turnover_for_asset_and_column(asset_name, column_name, turnovers) if turnover > 0: cost = _cost_for_asset_and_column(asset_name, column_name, dict_of_costs) @@ -278,10 +273,8 @@ def _calculate_cost_per_turnover( def _turnover_for_asset_and_column(asset_name: str, column_name: str, turnovers: dict): - return turnovers[column_name][asset_name] def _cost_for_asset_and_column(asset_name: str, column_name: str, dict_of_costs: dict): - return dict_of_costs[asset_name][column_name] diff --git a/sysquant/optimisation/shared.py b/sysquant/optimisation/shared.py index 78a3b86132..eb949aa2f0 100644 --- a/sysquant/optimisation/shared.py +++ b/sysquant/optimisation/shared.py @@ -19,7 +19,6 @@ def optimise_given_estimates( equalise_vols: bool = True, **_ignored_kwargs, ) -> estimatesWithPortfolioWeights: - estimates = estimates.equalise_estimates( equalise_vols=equalise_vols, equalise_SR=equalise_SR, @@ -57,7 +56,6 @@ def sigma_from_corr_and_std(stdev_list: list, corrmatrix: list): def optimise_from_sigma_and_mean_list(sigma: np.array, mean_list: list) -> list: - mus = np.array(mean_list, ndmin=2).transpose() number_assets = sigma.shape[1] start_weights = [1.0 / number_assets] * number_assets diff --git a/sysquant/portfolio_risk.py b/sysquant/portfolio_risk.py index 28695dcf6a..0e3768a704 100644 --- a/sysquant/portfolio_risk.py +++ b/sysquant/portfolio_risk.py @@ -18,7 +18,6 @@ def calc_sum_annualised_risk_given_portfolio_weights( portfolio_weights: seriesOfPortfolioWeights, pd_of_stdev: seriesOfStdevEstimates ) -> pd.Series: - instrument_list = list(portfolio_weights.columns) aligned_stdev = pd_of_stdev[instrument_list].reindex(portfolio_weights.index) @@ -33,7 +32,6 @@ def calc_portfolio_risk_series( list_of_correlations: CorrelationList, pd_of_stdev: seriesOfStdevEstimates, ) -> pd.Series: - risk_series = [] common_index = list(portfolio_weights.index) progress = progressBar( @@ -66,7 +64,6 @@ def get_covariance_matrix( pd_of_stdev: seriesOfStdevEstimates, relevant_date: datetime.datetime, ) -> covarianceEstimate: - instrument_list = list(pd_of_stdev.columns) correlation_estimate = get_correlation_matrix( relevant_date=relevant_date, diff --git a/sysquant/returns.py b/sysquant/returns.py index fb96f45184..c27a885baa 100644 --- a/sysquant/returns.py +++ b/sysquant/returns.py @@ -105,7 +105,6 @@ def get_average_return(self) -> float: def adjust_returns_for_SR_costs( self, dict_of_SR_costs: dictOfSR ) -> "dictOfReturnsForOptimisation": - net_returns_dict = dict( [ ( @@ -120,7 +119,6 @@ def adjust_returns_for_SR_costs( return net_returns_dict def single_resampled_set_of_returns(self, frequency: str) -> returnsForOptimisation: - returns_as_list = listOfDataFrames(self.values()) pooled_length = len(returns_as_list) @@ -140,7 +138,6 @@ def single_resampled_set_of_returns(self, frequency: str) -> returnsForOptimisat def _adjust_df_for_SR_costs(gross_returns: pd.DataFrame, dict_of_SR_costs: dictOfSR): - net_returns_as_dict = dict( [ ( @@ -161,7 +158,6 @@ def _adjust_df_for_SR_costs(gross_returns: pd.DataFrame, dict_of_SR_costs: dictO def _adjust_df_column_for_SR_costs( gross_returns: pd.DataFrame, dict_of_SR_costs: dictOfSR, column_name: str ): - # Returns always business days daily_gross_returns_for_column = gross_returns[column_name] @@ -183,7 +179,6 @@ def _adjust_df_column_for_SR_costs( def _get_average_return_in_dict_for_column( returns_dict: dictOfReturnsForOptimisation, column: str ) -> float: - ## all daily data so can take an average series_of_returns = [ returns_series[column].values for returns_series in returns_dict.values() diff --git a/systems/accounts/account_buffering_subsystem.py b/systems/accounts/account_buffering_subsystem.py index e3ca67bbc7..cf4d094752 100644 --- a/systems/accounts/account_buffering_subsystem.py +++ b/systems/accounts/account_buffering_subsystem.py @@ -73,7 +73,6 @@ def _get_buffered_subsystem_position_given_optimal_position_and_buffers( pos_buffers: pd.DataFrame, roundpositions: bool = True, ) -> pd.Series: - self.log.debug("Calculating buffered subsystem positions") trade_to_edge = self.config.buffer_trade_to_edge diff --git a/systems/accounts/account_buffering_system.py b/systems/accounts/account_buffering_system.py index 3cc4b21d4e..c5d0361a61 100644 --- a/systems/accounts/account_buffering_system.py +++ b/systems/accounts/account_buffering_system.py @@ -106,7 +106,6 @@ def _get_buffered_position_given_optimal_position_and_buffers( pos_buffers: pd.DataFrame, roundpositions: bool = True, ) -> pd.Series: - self.log.debug("Calculating buffered positions") trade_to_edge = self.config.buffer_trade_to_edge diff --git a/systems/accounts/account_costs.py b/systems/accounts/account_costs.py index afb7cbc854..c9d2c8e20b 100644 --- a/systems/accounts/account_costs.py +++ b/systems/accounts/account_costs.py @@ -124,7 +124,6 @@ def _get_SR_transaction_cost_instr_forecast_for_list( def _get_forecast_length_weighting_for_list_of_instruments( self, instrument_code_list: list, rule_variation_name: str ) -> list: - forecast_lengths = [ self._get_forecast_length_for_instrument_rule( instrument_code, rule_variation_name @@ -150,7 +149,6 @@ def _get_forecast_length_for_instrument_rule( def _get_SR_transaction_cost_of_rule_for_individual_instrument( self, instrument_code: str, rule_variation_name: str ) -> float: - # note the turnover may still be pooled.. turnover = self.forecast_turnover(instrument_code, rule_variation_name) @@ -165,7 +163,6 @@ def _get_SR_transaction_cost_of_rule_for_individual_instrument( def get_SR_cost_given_turnover( self, instrument_code: str, turnover: float ) -> float: - SR_cost_trading = self.get_SR_trading_cost_only_given_turnover( instrument_code, turnover ) @@ -233,7 +230,6 @@ def forecast_turnover( def _forecast_turnover_pooled( self, instrument_code: str, rule_variation_name: str ) -> float: - instrument_code_list = self.has_same_rules_as_code(instrument_code) turnover_for_SR = self._forecast_turnover_for_list( instrument_code_list, rule_variation_name=rule_variation_name @@ -288,7 +284,6 @@ def _forecast_turnover_for_list_by_instrument( def _forecast_turnover_for_individual_instrument( self, instrument_code: str, rule_variation_name: str ) -> float: - forecast = self.get_capped_forecast(instrument_code, rule_variation_name) average_forecast_for_turnover = self.average_forecast() diff --git a/systems/accounts/account_forecast.py b/systems/accounts/account_forecast.py index 1cedce84f6..3c28dad740 100644 --- a/systems/accounts/account_forecast.py +++ b/systems/accounts/account_forecast.py @@ -16,7 +16,6 @@ class accountForecast(accountCosts): def pandl_for_instrument_forecast_weighted_within_trading_rule( self, instrument_code: str, rule_variation_name: str, delayfill: bool = True ) -> accountCurve: - pandl_for_instrument_forecast = self.pandl_for_instrument_forecast( instrument_code, rule_variation_name, delayfill=delayfill ) @@ -35,7 +34,6 @@ def pandl_for_instrument_forecast_weighted_within_trading_rule( def pandl_for_instrument_forecast_weighted( self, instrument_code: str, rule_variation_name: str, delayfill: bool = True ) -> accountCurve: - pandl_for_instrument_forecast = self.pandl_for_instrument_forecast( instrument_code, rule_variation_name, delayfill=delayfill ) @@ -88,7 +86,6 @@ def _total_unnormalised_weight_for_trading_rule( def _normalised_weight_for_forecast_and_instrument( self, instrument_code: str, rule_variation_name: str ) -> pd.Series: - weight = self._unnormalised_weight_for_forecast_and_instrument( instrument_code=instrument_code, rule_variation_name=rule_variation_name ) @@ -135,7 +132,6 @@ def _total_unnormalised_weight_for_instrument( def _unnormalised_weight_for_forecast_and_instrument( self, instrument_code: str, rule_variation_name: str ) -> pd.Series: - idm = self.instrument_diversification_multiplier() fdm = self.forecast_diversification_multiplier(instrument_code) instrument_weight = self.specific_instrument_weight(instrument_code) @@ -240,7 +236,6 @@ def pandl_for_instrument_forecast( delayfill=True, value_per_point=ARBITRARY_VALUE_OF_PRICE_POINT, ) -> accountCurve: - if daily_returns_volatility is arg_not_supplied: daily_returns_volatility = robust_daily_vol_given_price(price) @@ -279,7 +274,6 @@ def pandl_for_instrument_forecast( def _get_notional_position_for_forecast( normalised_forecast: pd.Series, average_notional_position: pd.Series ) -> pd.Series: - aligned_average = average_notional_position.reindex( normalised_forecast.index, method="ffill" ) @@ -293,7 +287,6 @@ def _get_average_notional_position( risk_target: float = ARBITRARY_FORECAST_ANNUAL_RISK_TARGET_PERCENTAGE, value_per_point=ARBITRARY_VALUE_OF_PRICE_POINT, ) -> pd.Series: - daily_risk_target = risk_target / ROOT_BDAYS_INYEAR daily_cash_vol_target = capital * daily_risk_target @@ -306,7 +299,6 @@ def _get_average_notional_position( def _get_normalised_forecast( forecast: pd.Series, target_abs_forecast: float = 10.0 ) -> pd.Series: - normalised_forecast = forecast / target_abs_forecast return normalised_forecast diff --git a/systems/accounts/account_inputs.py b/systems/accounts/account_inputs.py index 278dfa71a4..6ebf4b5193 100644 --- a/systems/accounts/account_inputs.py +++ b/systems/accounts/account_inputs.py @@ -17,7 +17,6 @@ def get_raw_price(self, instrument_code: str) -> pd.Series: def get_instrument_prices_for_position_or_forecast( self, instrument_code: str, position_or_forecast: pd.Series = arg_not_supplied ) -> pd.Series: - if position_or_forecast is arg_not_supplied: return self.get_daily_prices(instrument_code) @@ -36,7 +35,6 @@ def get_instrument_prices_for_position_or_forecast( def instrument_prices_for_position_or_forecast_infer_frequency( self, instrument_code: str, position_or_forecast: pd.Series = arg_not_supplied ) -> pd.Series: - try: frequency = infer_frequency(position_or_forecast) if frequency is BUSINESS_DAY_FREQ: @@ -74,7 +72,6 @@ def get_capped_forecast( @diagnostic() def get_daily_returns_volatility(self, instrument_code: str) -> pd.Series: - system = self.parent returns_vol = system.rawdata.daily_returns_volatility(instrument_code) diff --git a/systems/accounts/account_instruments.py b/systems/accounts/account_instruments.py index de314acebb..c0f24bda27 100644 --- a/systems/accounts/account_instruments.py +++ b/systems/accounts/account_instruments.py @@ -120,7 +120,6 @@ def _pandl_for_instrument_with_SR_costs( delayfill: bool = True, roundpositions: bool = True, ) -> accountCurve: - price = self.get_instrument_prices_for_position_or_forecast( instrument_code, position_or_forecast=positions ) @@ -162,7 +161,6 @@ def _pandl_for_instrument_with_SR_costs( def turnover_at_portfolio_level( self, instrument_code: str, roundpositions: bool = True ) -> float: - ## assumes we use all capital average_position_for_turnover = self.get_average_position_at_subsystem_level( instrument_code @@ -184,7 +182,6 @@ def _pandl_for_instrument_with_cash_costs( delayfill: bool = True, roundpositions: bool = True, ) -> accountCurve: - if not roundpositions: self.log.warning( "Using roundpositions=False with cash costs may lead to inaccurate costs (fixed costs, eg commissions will be overstated!!!" diff --git a/systems/accounts/account_subsystem.py b/systems/accounts/account_subsystem.py index d3249793db..4a612eb45c 100644 --- a/systems/accounts/account_subsystem.py +++ b/systems/accounts/account_subsystem.py @@ -14,7 +14,6 @@ class accountSubsystem(accountBufferingSubSystemLevel): def pandl_across_subsystems( self, delayfill=True, roundpositions=False ) -> accountCurveGroup: - instrument_list = self.get_instrument_list() pandl_across_subsystems = self.pandl_across_subsystems_given_instrument_list( @@ -27,7 +26,6 @@ def pandl_across_subsystems( def pandl_across_subsystems_given_instrument_list( self, instrument_list: list, delayfill=True, roundpositions=False ) -> accountCurveGroup: - dict_of_pandl_across_subsystems = dict( [ ( @@ -104,7 +102,6 @@ def pandl_for_subsystem( def _pandl_for_subsystem_with_SR_costs( self, instrument_code, delayfill=True, roundpositions=False ) -> accountCurve: - pandl_calculator = self._pandl_calculator_for_subsystem_with_SR_costs( instrument_code=instrument_code, delayfill=delayfill, @@ -119,7 +116,6 @@ def _pandl_for_subsystem_with_SR_costs( def _pandl_calculator_for_subsystem_with_SR_costs( self, instrument_code, delayfill=True, roundpositions=False ) -> pandlCalculationWithSRCosts: - positions = self.get_buffered_subsystem_position(instrument_code) price = self.get_instrument_prices_for_position_or_forecast( instrument_code, position_or_forecast=positions @@ -159,7 +155,6 @@ def _pandl_calculator_for_subsystem_with_SR_costs( def _pandl_for_subsystem_with_cash_costs( self, instrument_code, delayfill=True, roundpositions=True ) -> accountCurve: - pandl_calculator = self._pandl_calculator_for_subsystem_with_cash_costs( instrument_code=instrument_code, delayfill=delayfill, @@ -174,7 +169,6 @@ def _pandl_for_subsystem_with_cash_costs( def _pandl_calculator_for_subsystem_with_cash_costs( self, instrument_code, delayfill=True, roundpositions=True ) -> pandlCalculationWithCashCostsAndFills: - raw_costs = self.get_raw_cost_data(instrument_code) positions = self.get_buffered_subsystem_position(instrument_code) price = self.get_instrument_prices_for_position_or_forecast( diff --git a/systems/accounts/account_trading_rules.py b/systems/accounts/account_trading_rules.py index 4ca8956124..3557777c45 100644 --- a/systems/accounts/account_trading_rules.py +++ b/systems/accounts/account_trading_rules.py @@ -16,7 +16,6 @@ class accountTradingRules(accountForecast): def pandl_for_trading_rule_weighted( self, rule_variation_name: str, delayfill: bool = True ) -> accountCurveGroup: - list_of_instruments = self.get_instrument_list() dict_of_pandl_by_instrument = dict( [ @@ -46,7 +45,6 @@ def pandl_for_trading_rule_weighted( def pandl_for_trading_rule_unweighted( self, rule_variation_name: str, delayfill: bool = True ) -> accountCurveGroup: - list_of_instruments = self.get_instrument_list() dict_of_pandl_by_instrument = dict( [ @@ -76,7 +74,6 @@ def pandl_for_trading_rule_unweighted( def pandl_for_trading_rule( self, rule_variation_name: str, delayfill: bool = True ) -> accountCurveGroup: - # If I want the performance of a given trading rule across individual # instruments in isolation, then I need to take pandl_for_trading_rule_weighted # and normalise it so that the returns are as a proportion of the sum of @@ -114,7 +111,6 @@ def pandl_for_trading_rule( def pandl_for_all_trading_rules( self, delayfill: bool = True ) -> nestedAccountCurveGroup: - ## group of pandl_for_trading_rule_weighted list_of_rules = self.list_of_trading_rules() @@ -136,7 +132,6 @@ def pandl_for_all_trading_rules( @diagnostic(not_pickable=True) def pandl_for_all_trading_rules_unweighted(self, delayfill: bool = True): - # group of pandl_for_trading_rule list_of_rules = self.list_of_trading_rules() @@ -160,7 +155,6 @@ def pandl_for_all_trading_rules_unweighted(self, delayfill: bool = True): def pandl_for_instrument_rules( self, instrument_code: str, delayfill: bool = True ) -> accountCurveGroup: - # how all trading rules have done for a particular instrument, weighted list_of_rules = self.list_of_rules_for_code(instrument_code) dict_of_pandl_by_rule = dict( @@ -194,7 +188,6 @@ def pandl_for_instrument_rules_unweighted( trading_rule_list=arg_not_supplied, delayfill: bool = True, ) -> accountCurveGroup: - # (unweighted group - elements are pandl_for_instrument_forecast across trading rules) if trading_rule_list is arg_not_supplied: trading_rule_list = self.list_of_rules_for_code(instrument_code) diff --git a/systems/accounts/account_with_multiplier.py b/systems/accounts/account_with_multiplier.py index 62ce39d5ea..9dc363fa8d 100644 --- a/systems/accounts/account_with_multiplier.py +++ b/systems/accounts/account_with_multiplier.py @@ -17,7 +17,6 @@ class accountWithMultiplier(accountPortfolio, accountBufferingSystemLevel): @output(not_pickable=True) def portfolio_with_multiplier(self, delayfill=True, roundpositions=True): - self.log.info("Calculating pandl for portfolio with multiplier") capital = self.get_actual_capital() instruments = self.get_instrument_list() diff --git a/systems/accounts/curves/account_curve.py b/systems/accounts/curves/account_curve.py index b663dfb119..9e238958d4 100644 --- a/systems/accounts/curves/account_curve.py +++ b/systems/accounts/curves/account_curve.py @@ -26,7 +26,6 @@ def __init__( is_percentage: bool = False, weighted=False, ): - as_pd_series = pandl_calculator_with_costs.as_pd_series_for_frequency( percent=is_percentage, curve_type=curve_type, frequency=frequency ) @@ -351,7 +350,6 @@ def demeaned_remove_zeros(self): return demeaned_remove_zeros(x) def stats(self): - stats_list = [ "min", "max", diff --git a/systems/accounts/curves/account_curve_group.py b/systems/accounts/curves/account_curve_group.py index f1c763ccfe..8215ed503e 100644 --- a/systems/accounts/curves/account_curve_group.py +++ b/systems/accounts/curves/account_curve_group.py @@ -15,7 +15,6 @@ class accountCurveGroup(accountCurve): def __init__(self, dict_of_account_curves: dictOfAccountCurves, capital, **kwargs): - total_pandl_calculator = dict_of_account_curves.summed_pandl_calculator( capital=capital ) @@ -46,7 +45,6 @@ def to_frame(self) -> pd.DataFrame: def get_stats( self, stat_name: str, curve_type: str = "net", freq: str = "daily" ) -> statsDict: - return statsDict(self, item=stat_name, freq=freq, curve_type=curve_type) ## TO RETURN A 'NEW' ACCOUNT CURVE GROUP diff --git a/systems/accounts/curves/nested_account_curve_group.py b/systems/accounts/curves/nested_account_curve_group.py index b9ea0be5db..ad2c62bfba 100644 --- a/systems/accounts/curves/nested_account_curve_group.py +++ b/systems/accounts/curves/nested_account_curve_group.py @@ -17,7 +17,6 @@ def __init__( capital, **kwargs, ): - super().__init__(nested_dict_of_account_curves, capital=capital, **kwargs) self._nested_dict_of_account_curves = nested_dict_of_account_curves diff --git a/systems/accounts/curves/stats_dict.py b/systems/accounts/curves/stats_dict.py index 17849fffbe..c97c58445d 100644 --- a/systems/accounts/curves/stats_dict.py +++ b/systems/accounts/curves/stats_dict.py @@ -37,7 +37,6 @@ def __init__( curve_type="net", percent=True, ): - dict_of_results_by_stat = self.dict_of_results_by_asset_name( item=item, account_curve_group=account_curve_group, @@ -59,7 +58,6 @@ def dict_of_results_by_asset_name_equal_weighted( curve_type_str="net", is_percentage=True, ): - weight = 1.0 / len(self.keys()) unweighted_results = self.dict_of_results_by_asset_name( item=item, @@ -86,7 +84,6 @@ def dict_of_results_by_asset_name_timeweighted( curve_type_str="net", is_percentage=True, ): - time_weights_dict = self._time_weights() unweighted_results = self.dict_of_results_by_asset_name( item=item, @@ -116,7 +113,6 @@ def dict_of_results_by_asset_name( curve_type_str="net", is_percentage=True, ): - if account_curve_group is arg_not_supplied: account_curve_group = self.account_curve_group @@ -148,7 +144,6 @@ def statresult_for_item( curve_type_str="net", is_percentage=True, ): - if item is arg_not_supplied: item = self.item @@ -172,7 +167,6 @@ def account_curve_for_asset( curve_type_str="net", is_percentage=True, ) -> accountCurve: - if account_curve_group is arg_not_supplied: account_curve_group = self.account_curve_group diff --git a/systems/accounts/order_simulator/account_curve_order_simulator.py b/systems/accounts/order_simulator/account_curve_order_simulator.py index 93f6d0c33d..36f55dc58a 100644 --- a/systems/accounts/order_simulator/account_curve_order_simulator.py +++ b/systems/accounts/order_simulator/account_curve_order_simulator.py @@ -16,7 +16,6 @@ class AccountWithOrderSimulator(Account): def pandl_for_subsystem( self, instrument_code, delayfill=True, roundpositions=True ) -> accountCurve: - self.log.debug( "Calculating pandl for subsystem for instrument %s" % instrument_code, instrument_code=instrument_code, @@ -41,7 +40,6 @@ def pandl_for_subsystem( def _pandl_calculator_for_subsystem_with_cash_costs( self, instrument_code, delayfill=True, roundpositions=True ) -> pandlCalculationWithCashCostsAndFills: - ## Should be checked earlier, but just in case called directly ## Order simulator doesn't work otherwise assert delayfill @@ -102,7 +100,6 @@ def pandl_for_instrument( def _pandl_calculator_for_instrument_with_cash_costs( self, instrument_code, delayfill=True, roundpositions=True ) -> pandlCalculationWithCashCostsAndFills: - ## Should be checked earlier, but just in case called directly ## Order simulator doesn't work otherwise assert delayfill diff --git a/systems/accounts/order_simulator/fills_and_orders.py b/systems/accounts/order_simulator/fills_and_orders.py index c84c11c974..c5a8da8438 100644 --- a/systems/accounts/order_simulator/fills_and_orders.py +++ b/systems/accounts/order_simulator/fills_and_orders.py @@ -66,7 +66,6 @@ def fill_from_simple_limit_order( market_price: float, fill_datetime: datetime.datetime, ) -> Fill: - limit_price = simple_order.limit_price if simple_order.quantity > 0: if limit_price > market_price: @@ -94,7 +93,6 @@ def fill_from_simple_market_order( market_price: float, fill_datetime: datetime.datetime, ) -> Fill: - return Fill( fill_datetime, simple_order.quantity, diff --git a/systems/accounts/order_simulator/hourly_limit_orders.py b/systems/accounts/order_simulator/hourly_limit_orders.py index 275d0fafe5..9049f04ae4 100644 --- a/systems/accounts/order_simulator/hourly_limit_orders.py +++ b/systems/accounts/order_simulator/hourly_limit_orders.py @@ -38,7 +38,6 @@ def generate_order_and_fill_at_idx_point_for_limit_orders( current_datetime: datetime.datetime, data_for_idx: DataAtIDXPoint, ) -> Tuple[ListOfSimpleOrdersWithDate, Fill]: - current_optimal_position = data_for_idx.current_optimal_position if np.isnan(current_optimal_position): quantity = 0 diff --git a/systems/accounts/order_simulator/hourly_market_orders.py b/systems/accounts/order_simulator/hourly_market_orders.py index b91d378ce6..87a403dae3 100644 --- a/systems/accounts/order_simulator/hourly_market_orders.py +++ b/systems/accounts/order_simulator/hourly_market_orders.py @@ -25,7 +25,6 @@ def _build_hourly_series_data_for_order_simulator( instrument_code: str, is_subsystem: bool = False, ) -> OrdersSeriesData: - price_series = system_accounts_stage.get_hourly_prices(instrument_code) if is_subsystem: unrounded_positions = ( diff --git a/systems/accounts/order_simulator/pandl_order_simulator.py b/systems/accounts/order_simulator/pandl_order_simulator.py index 38d88e6586..ca65400d3c 100644 --- a/systems/accounts/order_simulator/pandl_order_simulator.py +++ b/systems/accounts/order_simulator/pandl_order_simulator.py @@ -138,7 +138,6 @@ def build_daily_series_data_for_order_simulator( instrument_code: str, is_subsystem: bool = False, ) -> OrdersSeriesData: - price_series = system_accounts_stage.get_daily_prices(instrument_code) if is_subsystem: unrounded_positions = ( @@ -172,7 +171,6 @@ def generate_positions_orders_and_fills_from_series_data( passed_idx_data_function: Callable, passed_orders_fills_function: Callable, ) -> PositionsOrdersFills: - master_index = series_data.price_series.index list_of_positions = [] @@ -244,7 +242,6 @@ def generate_order_and_fill_at_idx_point_for_market_orders( current_datetime: datetime.datetime, data_for_idx: DataAtIDXPoint, ) -> ListOfSimpleOrdersAndResultingFill: - current_optimal_position = data_for_idx.current_optimal_position next_datetime = data_for_idx.next_datetime next_price = data_for_idx.next_price diff --git a/systems/accounts/pandl_calculators/pandl_calculation.py b/systems/accounts/pandl_calculators/pandl_calculation.py index 1a619891d7..0cd00c2e22 100644 --- a/systems/accounts/pandl_calculators/pandl_calculation.py +++ b/systems/accounts/pandl_calculators/pandl_calculation.py @@ -18,7 +18,6 @@ def __init__( delayfill=False, passed_diagnostic_df: pd.DataFrame = arg_not_supplied, ): - self._price = price self._positions = positions self._fx = fx @@ -41,7 +40,6 @@ def calculations_df(self) -> pd.Series: raise NotImplemented("Not implemented") def weight(self, weight: pd.Series): - weighted_capital = apply_weighting(weight, self.capital) weighted_positions = apply_weighting(weight, self.positions) @@ -58,7 +56,6 @@ def weight(self, weight: pd.Series): def capital_as_pd_series_for_frequency( self, frequency: Frequency = DAILY_PRICE_FREQ ) -> pd.Series: - capital = self.capital resample_freq = from_config_frequency_pandas_resample(frequency) capital_at_frequency = capital.resample(resample_freq).ffill() @@ -68,7 +65,6 @@ def capital_as_pd_series_for_frequency( def as_pd_series_for_frequency( self, frequency: Frequency = DAILY_PRICE_FREQ, **kwargs ) -> pd.Series: - as_pd_series = self.as_pd_series(**kwargs) resample_freq = from_config_frequency_pandas_resample(frequency) @@ -121,7 +117,6 @@ def _pandl_in_instrument_ccy_given_points_pandl( return pandl_in_points * point_size def pandl_in_points(self) -> pd.Series: - pandl_in_points = calculate_pandl(positions=self.positions, prices=self.price) return pandl_in_points diff --git a/systems/accounts/pandl_calculators/pandl_calculation_dict.py b/systems/accounts/pandl_calculators/pandl_calculation_dict.py index a102fb349d..068f97d6d5 100644 --- a/systems/accounts/pandl_calculators/pandl_calculation_dict.py +++ b/systems/accounts/pandl_calculators/pandl_calculation_dict.py @@ -11,7 +11,6 @@ def __init__( costs_pandl_in_base_currency: pd.Series, capital: pd.Series, ): - super().__init__(price=pd.Series(dtype="float64"), capital=capital) self._pandl_in_base_currency = pandl_in_base_currency @@ -65,7 +64,6 @@ def _index_to_align_capital_to(self): class dictOfPandlCalculatorsWithGenericCosts(dict): def sum(self, capital) -> pandlCalculationWithoutPositions: - pandl_in_base_currency = self.sum_of_pandl_in_base_currency() costs_pandl_in_base_currency = self.sum_of_costs_pandl_in_base_currency() diff --git a/systems/accounts/pandl_calculators/pandl_cash_costs.py b/systems/accounts/pandl_calculators/pandl_cash_costs.py index 15f830becc..0c31da1b42 100644 --- a/systems/accounts/pandl_calculators/pandl_cash_costs.py +++ b/systems/accounts/pandl_calculators/pandl_cash_costs.py @@ -188,7 +188,6 @@ def last_date_with_positions(self) -> datetime.datetime: return self.positions.index[-1] def normalise_costs_in_instrument_currency(self, costs_as_pd_series) -> pd.Series: - dont_normalise_currency_costs = not self.vol_normalise_currency_costs if dont_normalise_currency_costs: return costs_as_pd_series diff --git a/systems/accounts/pandl_calculators/pandl_generic_costs.py b/systems/accounts/pandl_calculators/pandl_generic_costs.py index 22fd97bf0b..434c195d46 100644 --- a/systems/accounts/pandl_calculators/pandl_generic_costs.py +++ b/systems/accounts/pandl_calculators/pandl_generic_costs.py @@ -13,7 +13,6 @@ class pandlCalculationWithGenericCosts(pandlCalculation): def weight(self, weight: pd.Series): - weighted_capital = apply_weighting(weight, self.capital) weighted_positions = apply_weighting(weight, self.positions) diff --git a/systems/accounts/pandl_calculators/pandl_using_fills.py b/systems/accounts/pandl_calculators/pandl_using_fills.py index ad20085f40..804fd18b23 100644 --- a/systems/accounts/pandl_calculators/pandl_using_fills.py +++ b/systems/accounts/pandl_calculators/pandl_using_fills.py @@ -41,7 +41,6 @@ def using_positions_and_prices_merged_from_fills( fills: ListOfFills, **kwargs, ): - merged_prices = merge_fill_prices_with_prices(price, fills) return pandlCalculation(price=merged_prices, positions=positions, **kwargs) @@ -60,7 +59,6 @@ def fills(self) -> ListOfFills: return fills def _infer_fills_from_position(self) -> ListOfFills: - # positions will have delayfill and round applied to them already positions = self.positions if positions is arg_not_supplied: diff --git a/systems/basesystem.py b/systems/basesystem.py index 302b9815e1..45eeb5eee8 100644 --- a/systems/basesystem.py +++ b/systems/basesystem.py @@ -187,7 +187,6 @@ def _get_instrument_list_from_config( remove_short_history=False, days_required=750, ) -> list: - instrument_list = self._get_raw_instrument_list_from_config() instrument_list = self._remove_instruments_from_instrument_list( instrument_list, @@ -232,7 +231,6 @@ def _remove_instruments_from_instrument_list( remove_short_history=False, days_required: int = 750, ): - list_of_instruments_to_remove = self.get_list_of_instruments_to_remove( remove_duplicates=remove_duplicates, remove_short_history=remove_short_history, @@ -265,7 +263,6 @@ def get_list_of_markets_not_trading_but_with_data( remove_short_history=False, days_required=750, ) -> list: - not_trading = self.get_list_of_instruments_to_remove( remove_duplicates=remove_duplicates, remove_short_history=remove_short_history, @@ -298,7 +295,6 @@ def get_list_of_instruments_to_remove( remove_short_history=False, days_required=750, ) -> list: - list_to_remove = [] if remove_duplicates: list_of_duplicates = self.get_list_of_duplicate_instruments_to_remove() @@ -395,7 +391,6 @@ def get_list_of_short_history(self, days_required: int = 750) -> list: def get_instrument_weights_from_config(config: Config) -> dict: - instrument_weights_config = getattr(config, "instrument_weights", None) if instrument_weights_config is None: raise Exception("Instrument config not available") diff --git a/systems/buffering.py b/systems/buffering.py index ae12f18003..9688578247 100644 --- a/systems/buffering.py +++ b/systems/buffering.py @@ -41,7 +41,6 @@ def calculate_buffers( idm: pd.Series = arg_not_supplied, log=get_logger(""), ) -> pd.Series: - log.debug( "Calculating buffers for %s" % instrument_code, instrument_code=instrument_code, @@ -138,7 +137,6 @@ def get_position_method_buffer( def get_buffer_if_not_buffering(position: pd.Series) -> pd.Series: - EPSILON_POSITION = 0.001 buffer = pd.Series([EPSILON_POSITION] * position.shape[0], index=position.index) @@ -152,7 +150,6 @@ def _calculate_forecast_buffer_method( idm: pd.Series = arg_not_supplied, instr_weight_this_code: pd.Series = arg_not_supplied, ): - if instr_weight_this_code is arg_not_supplied: instr_weight_this_code_indexed = 1.0 else: diff --git a/systems/diagoutput.py b/systems/diagoutput.py index 5e726dce66..643f4d9172 100644 --- a/systems/diagoutput.py +++ b/systems/diagoutput.py @@ -252,7 +252,6 @@ def output_config_with_estimated_parameters( "instrument_div_multiplier", ], ): - output_dict = {} for config_item in attr_names: dict_function = getattr(self, config_item) @@ -276,7 +275,6 @@ def yaml_config_with_estimated_parameters( "instrument_div_multiplier", ], ): - output_dict = self.output_config_with_estimated_parameters( attr_names=attr_names ) diff --git a/systems/forecast_combine.py b/systems/forecast_combine.py index fb62d0fed1..6c49094ae6 100755 --- a/systems/forecast_combine.py +++ b/systems/forecast_combine.py @@ -110,7 +110,6 @@ def get_combined_forecast(self, instrument_code: str) -> pd.Series: def get_raw_combined_forecast_before_mapping( self, instrument_code: str ) -> pd.Series: - # sum raw_combined_forecast = self.get_combined_forecast_without_multiplier( instrument_code @@ -524,7 +523,6 @@ def get_raw_monthly_forecast_weights(self, instrument_code: str) -> pd.DataFrame def _remove_expensive_rules_from_weights( self, instrument_code, monthly_forecast_weights: pd.DataFrame ) -> pd.DataFrame: - cheap_rules = self.cheap_trading_rules_post_processing(instrument_code) if len(cheap_rules) == 0: ## special case all zeros @@ -662,7 +660,6 @@ def returns_pre_processing( def get_turnover_for_list_of_rules( self, codes_to_use: list, list_of_rules: list ) -> turnoverDataAcrossTradingRules: - turnover_dict = dict( [ (rule_name, self.get_turnover_for_forecast(codes_to_use, rule_name)) @@ -861,7 +858,6 @@ def get_SR_cost_for_instrument_forecast( @property def accounts_stage(self): - if not hasattr(self.parent, "accounts"): raise missingData @@ -1376,7 +1372,6 @@ def _cap_combined_forecast( forecast_cap: float = 20.0, forecast_floor: float = -20, ) -> pd.Series: - capped_combined_forecast = raw_multiplied_combined_forecast.clip( lower=forecast_floor, upper=forecast_cap ) @@ -1468,7 +1463,6 @@ def _get_forecast_weights_for_instrument_with_autogrouping( def _get_fixed_fdm_scalar_value_from_config( forecast_div_multiplier_config: dict, instrument_code: str, log ) -> float: - error_msg = "" fixed_div_mult = None diff --git a/systems/forecasting.py b/systems/forecasting.py index 4a3005299d..195ec3b282 100644 --- a/systems/forecasting.py +++ b/systems/forecasting.py @@ -127,7 +127,6 @@ def trading_rules(self): return trading_rules def _get_trading_rules_from_passed_rules(self): - # What where we passed when object was created? passed_rules = self.passed_trading_rules diff --git a/systems/portfolio.py b/systems/portfolio.py index 8b653a60e6..cc4d11f7f5 100644 --- a/systems/portfolio.py +++ b/systems/portfolio.py @@ -154,7 +154,6 @@ def get_buffers_for_position(self, instrument_code: str) -> pd.DataFrame: @diagnostic() def get_buffers(self, instrument_code: str) -> pd.Series: - position = self.get_notional_position(instrument_code) vol_scalar = self.get_average_position_at_subsystem_level(instrument_code) log = self.log @@ -272,7 +271,6 @@ def get_notional_position_without_idm(self, instrument_code: str) -> pd.Series: # IDM @dont_cache def get_instrument_diversification_multiplier(self) -> pd.Series: - if self.use_estimated_instrument_div_mult: idm = self.get_estimated_instrument_diversification_multiplier() else: @@ -556,7 +554,6 @@ def get_raw_fixed_instrument_weights(self) -> pd.DataFrame: @diagnostic() def get_fixed_instrument_weights_from_config(self) -> dict: - try: instrument_weights_dict = get_instrument_weights_from_config(self.config) except: @@ -672,7 +669,6 @@ def correlation_estimator_for_subsystem_returns(self): @diagnostic(protected=True, not_pickable=True) def calculation_of_raw_instrument_weights(self): - """ Estimate the instrument weights @@ -700,7 +696,6 @@ def calculation_of_raw_instrument_weights(self): @diagnostic(not_pickable=True) def returns_pre_processor(self) -> returnsPreProcessor: - instrument_list = self.get_instrument_list(for_instrument_weights=True) pandl_across_subsystems_raw = self.pandl_across_subsystems( instrument_list=instrument_list @@ -749,7 +744,6 @@ def _add_zero_weights_to_instrument_weights_df( def allocate_zero_instrument_weights_to_these_instruments( self, auto_remove_bad_instruments: bool = False ) -> list: - config_allocate_zero_instrument_weights_to_these_instruments = ( self.config_allocates_zero_instrument_weights_to_these_instruments( auto_remove_bad_instruments=auto_remove_bad_instruments @@ -768,7 +762,6 @@ def allocate_zero_instrument_weights_to_these_instruments( def config_allocates_zero_instrument_weights_to_these_instruments( self, auto_remove_bad_instruments: bool = False ): - bad_from_config = self.parent.get_list_of_markets_not_trading_but_with_data() config = self.config config_allocates_zero_instrument_weights_to_these_instruments = getattr( @@ -818,7 +811,6 @@ def config_allocates_zero_instrument_weights_to_these_instruments( return allocate_zero_instrument_weights_to_these_instruments def instruments_without_data_or_weights(self) -> list: - subsystem_positions = copy(self._get_all_subsystem_positions()) subsystem_positions[subsystem_positions.isna()] = 0 not_zero = subsystem_positions != 0 @@ -896,7 +888,6 @@ def pandl_across_subsystems( @input def turnover_across_subsystems(self) -> turnoverDataAcrossSubsystems: - instrument_list = self.get_instrument_list(for_instrument_weights=True) turnover_as_list = [ self.accounts_stage.subsystem_turnover(instrument_code) @@ -961,7 +952,6 @@ def capital_multiplier(self): ## RISK @diagnostic() def get_risk_scalar(self) -> pd.Series: - risk_overlay_config = self.config.get_element("risk_overlay") normal_risk = self.get_portfolio_risk_for_original_positions() @@ -1001,7 +991,6 @@ def get_sum_annualised_risk_given_portfolio_weights( self, portfolio_weights: seriesOfPortfolioWeights, ) -> pd.Series: - pd_of_stdev = self.get_stdev_df() risk_series = calc_sum_annualised_risk_given_portfolio_weights( portfolio_weights=portfolio_weights, pd_of_stdev=pd_of_stdev @@ -1022,7 +1011,6 @@ def get_portfolio_risk_for_original_positions_with_shocked_vol(self) -> pd.Serie def get_portfolio_risk_given_weights( self, portfolio_weights: seriesOfPortfolioWeights, use_shocked_vol=False ) -> pd.Series: - list_of_correlations = self.get_list_of_instrument_returns_correlations() pd_of_stdev = self.get_stdev_df(shocked=use_shocked_vol) risk_series = calc_portfolio_risk_series( @@ -1041,7 +1029,6 @@ def get_stdev_df(self, shocked: bool = False) -> seriesOfStdevEstimates: @diagnostic() def get_shocked_df_of_perc_vol(self) -> seriesOfStdevEstimates: - df_of_vol = self.get_df_of_perc_vol() shocked_df_of_vol = df_of_vol.shocked() @@ -1051,7 +1038,6 @@ def get_shocked_df_of_perc_vol(self) -> seriesOfStdevEstimates: def get_position_contracts_for_relevant_date( self, relevant_date: datetime.datetime = arg_not_supplied ) -> portfolioWeights: - position_contracts_as_df = self.get_position_contracts_as_df() position_contracts_at_date = get_row_of_df_aligned_to_weights_as_dict( position_contracts_as_df, relevant_date @@ -1066,7 +1052,6 @@ def get_covariance_matrix( relevant_date: datetime.datetime = arg_not_supplied, correlation_estimation_parameters=arg_not_supplied, ) -> covarianceEstimate: - correlation_estimate = self.get_correlation_matrix( relevant_date=relevant_date, correlation_estimation_parameters=correlation_estimation_parameters, @@ -1287,7 +1272,6 @@ def annualised_percentage_vol(self, instrument_code: str) -> pd.Series: def get_instrument_list( self, for_instrument_weights=False, auto_remove_bad_instruments=False ) -> list: - instrument_list = self.parent.get_instrument_list() if for_instrument_weights: instrument_list = copy(instrument_list) @@ -1354,7 +1338,6 @@ def get_portfolio_weights_from_contract_positions( contract_positions: pd.Series, per_contract_value_as_proportion_of_capital: pd.Series, ) -> pd.Series: - aligned_values = per_contract_value_as_proportion_of_capital.reindex( contract_positions.index, method="ffill" ) diff --git a/systems/positionsizing.py b/systems/positionsizing.py index 920de5c700..b7f45548f5 100644 --- a/systems/positionsizing.py +++ b/systems/positionsizing.py @@ -68,7 +68,6 @@ def get_buffers_for_subsystem_position(self, instrument_code: str) -> pd.Series: @diagnostic() def get_subsystem_buffers(self, instrument_code: str) -> pd.Series: - position = self.get_subsystem_position(instrument_code) vol_scalar = self.get_average_position_at_subsystem_level(instrument_code) diff --git a/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py b/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py index 57b5954852..f0343c8cf3 100644 --- a/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py +++ b/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py @@ -54,7 +54,6 @@ def get_raw_forecast(self, instrument_code, rule_variation_name): def get_attenuation_for_rule_and_instrument_indexed_to_forecast( self, instrument_code, rule_variation_name ) -> pd.Series: - raw_forecast_before_atten = self.get_raw_forecast_before_attenuation( instrument_code, rule_variation_name ) diff --git a/systems/provided/basic/system.py b/systems/provided/basic/system.py index d23759c115..b112d0066d 100644 --- a/systems/provided/basic/system.py +++ b/systems/provided/basic/system.py @@ -25,7 +25,6 @@ def basic_futures_system( config=arg_not_supplied, trading_rules=arg_not_supplied, ): - if config is arg_not_supplied: config = Config() @@ -53,7 +52,6 @@ def basic_csv_futures_system( config=arg_not_supplied, trading_rules=arg_not_supplied, ): - if data is arg_not_supplied: data = csvFuturesSimData() @@ -66,7 +64,6 @@ def basic_db_futures_system( config=arg_not_supplied, trading_rules=arg_not_supplied, ): - if data is arg_not_supplied: data = dbFuturesSimData() diff --git a/systems/provided/dynamic_small_system_optimise/accounts_stage.py b/systems/provided/dynamic_small_system_optimise/accounts_stage.py index 5ee6a6af5e..f6754b63a6 100644 --- a/systems/provided/dynamic_small_system_optimise/accounts_stage.py +++ b/systems/provided/dynamic_small_system_optimise/accounts_stage.py @@ -12,7 +12,6 @@ class accountForOptimisedStage(Account): @output(not_pickable=True) def optimised_portfolio(self, delayfill=True): - self.log.info("Calculating pandl for portfolio") capital = self.get_notional_capital() instruments = self.get_instrument_list() @@ -40,7 +39,6 @@ def optimised_portfolio(self, delayfill=True): def pandl_for_optimised_instrument( self, instrument_code: str, delayfill: bool = True ) -> accountCurve: - self.log.debug( "Calculating pandl for instrument for %s" % instrument_code, instrument_code=instrument_code, @@ -69,7 +67,6 @@ def optimised_turnover_at_portfolio_level( self, instrument_code: str, ) -> float: - ## assumes we use all capital average_position_for_turnover = self.get_average_position_at_subsystem_level( instrument_code diff --git a/systems/provided/dynamic_small_system_optimise/buffering.py b/systems/provided/dynamic_small_system_optimise/buffering.py index 3e65d79969..4ef3a30989 100644 --- a/systems/provided/dynamic_small_system_optimise/buffering.py +++ b/systems/provided/dynamic_small_system_optimise/buffering.py @@ -15,7 +15,6 @@ class speedControlForDynamicOpt: def calculate_adjustment_factor( speed_control: speedControlForDynamicOpt, tracking_error_of_prior: float ) -> np.array: - ## returns 1.0 if we do an entire trade (ok never happens) ## returns 0.0 if we do none of it if tracking_error_of_prior <= 0: @@ -37,7 +36,6 @@ def adjust_weights_with_factor( per_contract_value_as_np: np.array, adj_factor: float, ): - desired_trades_weight_space = optimised_weights_as_np - prior_weights_as_np adjusted_trades_weight_space = adj_factor * desired_trades_weight_space @@ -56,7 +54,6 @@ def adjust_weights_with_factor( def calculate_adjusting_trades_rounding_in_contract_space( adjusted_trades_weight_space: np.array, per_contract_value_as_np: np.array ) -> np.array: - adjusted_trades_in_contracts = ( adjusted_trades_weight_space / per_contract_value_as_np ) diff --git a/systems/provided/dynamic_small_system_optimise/optimisation.py b/systems/provided/dynamic_small_system_optimise/optimisation.py index ee47405c84..1c00283dad 100644 --- a/systems/provided/dynamic_small_system_optimise/optimisation.py +++ b/systems/provided/dynamic_small_system_optimise/optimisation.py @@ -44,7 +44,6 @@ def __init__( log: pst_logger = get_logger("objectiveFunctionForGreedy"), constraint_function: Callable = arg_not_supplied, ): - self.covariance_matrix = covariance_matrix self.per_contract_value = per_contract_value self.costs = costs @@ -140,7 +139,6 @@ def is_tracking_error_of_prior_smaller_than_buffer(self) -> bool: return tracking_error_smaller_than_buffer def tracking_error_of_prior_weights(self) -> float: - prior_weights = self.weights_prior_as_np_replace_nans_with_zeros tracking_error = self.tracking_error_against_optimal(prior_weights) diff --git a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py index d8a6b72e87..fafb7ba37d 100644 --- a/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py +++ b/systems/provided/dynamic_small_system_optimise/optimised_positions_stage.py @@ -80,7 +80,6 @@ def get_optimal_positions_with_fixed_contract_values( previous_positions: portfolioWeights = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, ) -> portfolioWeights: - obj_instance = self._get_optimal_positions_objective_instance( relevant_date=relevant_date, previous_positions=previous_positions, @@ -106,7 +105,6 @@ def _get_optimal_positions_objective_instance( previous_positions: portfolioWeights = arg_not_supplied, maximum_positions: portfolioWeights = arg_not_supplied, ) -> objectiveFunctionForGreedy: - covariance_matrix = self.get_covariance_matrix(relevant_date=relevant_date) per_contract_value = self.get_per_contract_value(relevant_date) @@ -267,7 +265,6 @@ def get_last_fx_rate(self, instrument_code: str) -> float: def get_covariance_matrix( self, relevant_date: datetime.datetime = arg_not_supplied ) -> covarianceEstimate: - correlation_estimate = self.get_correlation_matrix(relevant_date=relevant_date) stdev_estimate = self.get_stdev_estimate(relevant_date=relevant_date) @@ -281,7 +278,6 @@ def get_covariance_matrix( def get_correlation_matrix( self, relevant_date: datetime.datetime = arg_not_supplied ) -> correlationEstimate: - corr_matrix = self.portfolio_stage.get_correlation_matrix( relevant_date=relevant_date ) @@ -303,7 +299,6 @@ def correlation_shrinkage(self) -> float: def get_stdev_estimate( self, relevant_date: datetime.datetime = arg_not_supplied ) -> stdevEstimates: - return self.portfolio_stage.get_stdev_estimate(relevant_date=relevant_date) def get_per_contract_value( @@ -314,7 +309,6 @@ def get_per_contract_value( def get_current_contract_value_as_proportion_of_capital_for_instrument( self, instrument_code: str ) -> float: - value_as_ts = ( self.get_contract_ts_value_as_proportion_of_capital_for_instrument( instrument_code @@ -325,7 +319,6 @@ def get_current_contract_value_as_proportion_of_capital_for_instrument( def get_contract_ts_value_as_proportion_of_capital_for_instrument( self, instrument_code: str ) -> pd.Series: - return self.portfolio_stage.get_per_contract_value_as_proportion_of_capital( instrument_code ) @@ -395,7 +388,6 @@ def calculate_cost_per_notional_weight_as_proportion_of_capital( capital: float, cost_multiplier: float = 1.0, ) -> float: - dollar_cost = ( cost_multiplier * cost_per_contract diff --git a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py index 761b51604d..a8760fb6ad 100644 --- a/systems/provided/dynamic_small_system_optimise/set_up_constraints.py +++ b/systems/provided/dynamic_small_system_optimise/set_up_constraints.py @@ -103,7 +103,6 @@ def calculations_for_code( optimium_weight: float = np.nan, long_only: bool = False, ): - minimum, maximum = calculate_minima_and_maxima( reduce_only=reduce_only, no_trade=no_trade, @@ -132,7 +131,6 @@ def calculate_minima_and_maxima( max_position: float = arg_not_supplied, weight_prior: float = arg_not_supplied, ) -> tuple: - minimum = -A_VERY_LARGE_NUMBER maximum = A_VERY_LARGE_NUMBER @@ -173,7 +171,6 @@ def calculate_direction( minimum: float = -A_VERY_LARGE_NUMBER, maximum: float = A_VERY_LARGE_NUMBER, ) -> float: - ## always start at zero, so if minima/maxima already bind we can only go up or down if minimum >= 0.0: return 1 diff --git a/systems/provided/example/daily_with_order_simulation.py b/systems/provided/example/daily_with_order_simulation.py index 2b4b26056a..f08efeda88 100644 --- a/systems/provided/example/daily_with_order_simulation.py +++ b/systems/provided/example/daily_with_order_simulation.py @@ -30,7 +30,6 @@ def futures_system( use_vanilla_accounting: bool = False, config_filename="systems.provided.example.daily_with_order_simulation.yaml", ): - if sim_data is arg_not_supplied: sim_data = dbFuturesSimData() diff --git a/systems/provided/example/hourly_with_order_simulation.py b/systems/provided/example/hourly_with_order_simulation.py index 9dc069deb6..813c61bd0d 100644 --- a/systems/provided/example/hourly_with_order_simulation.py +++ b/systems/provided/example/hourly_with_order_simulation.py @@ -34,7 +34,6 @@ def futures_system( use_vanilla_accounting: bool = False, config_filename="systems.provided.example.hourly_with_order_simulator.yaml", ): - if sim_data is arg_not_supplied: sim_data = dbFuturesSimData() diff --git a/systems/provided/example/simplesystemconfig.yaml b/systems/provided/example/simplesystemconfig.yaml index b424b8c643..e80dd94c79 100644 --- a/systems/provided/example/simplesystemconfig.yaml +++ b/systems/provided/example/simplesystemconfig.yaml @@ -21,7 +21,7 @@ forecast_weights: ewmac32: 0.50 forecast_div_multiplier: 1.1 instrument_weights: - EDOLLAR: .4 + SOFR: .4 US10: .1 CORN: .3 SP500: .2 diff --git a/systems/provided/futures_chapter15/futuresconfig.yaml b/systems/provided/futures_chapter15/futuresconfig.yaml index a656b65be9..f5e9ed92b9 100644 --- a/systems/provided/futures_chapter15/futuresconfig.yaml +++ b/systems/provided/futures_chapter15/futuresconfig.yaml @@ -88,7 +88,7 @@ base_currency: "USD" # Portfolio creation # instrument_weights: - EDOLLAR: 0.117 + SOFR: 0.117 US10: 0.117 EUROSTX: 0.20 V2X: 0.098 @@ -116,7 +116,7 @@ style_groups: - carry countries: US: - - EDOLLAR + - SOFR - US10 Europe: - EUROSTX @@ -126,7 +126,7 @@ countries: - CORN asset_classes: Rates: - - EDOLLAR + - SOFR - US10 Risky: - EUROSTX diff --git a/systems/provided/rob_system/run_system.py b/systems/provided/rob_system/run_system.py index 2d1355bde9..7382d25f33 100644 --- a/systems/provided/rob_system/run_system.py +++ b/systems/provided/rob_system/run_system.py @@ -29,7 +29,6 @@ def futures_system( sim_data=arg_not_supplied, config_filename="systems.provided.rob_system.config.yaml" ): - if sim_data is arg_not_supplied: sim_data = dbFuturesSimData() diff --git a/systems/provided/static_small_system_optimise/optimise_small_system.py b/systems/provided/static_small_system_optimise/optimise_small_system.py index c22fff0043..99f98fa266 100644 --- a/systems/provided/static_small_system_optimise/optimise_small_system.py +++ b/systems/provided/static_small_system_optimise/optimise_small_system.py @@ -24,7 +24,6 @@ def find_best_ordered_set_of_instruments( notional_starting_IDM=1.0, capital=500000, ) -> list: - ## 'system' can be precalculated up to the combined forecast stage to save time system.config.notional_trading_capital = capital @@ -83,7 +82,6 @@ def get_correlation_matrix(system) -> correlationEstimate: def find_best_market( system, list_of_instruments: list, minimum_instrument_weight_idm: float ) -> str: - all_results = [] for instrument_code in list_of_instruments: all_results.append( @@ -134,7 +132,6 @@ def find_next_instrument( def SR_for_instrument_list( system, corr_matrix, instrument_list, minimum_instrument_weight_idm ): - estimates = build_estimates( instrument_list=instrument_list, corr_matrix=corr_matrix ) @@ -157,7 +154,6 @@ def SR_for_instrument_list( def build_estimates(instrument_list, corr_matrix, notional_years_data=30): - ## We don't take SR into account mean_estimates = meanEstimates( dict([(instrument_code, 1.0) for instrument_code in instrument_list]) @@ -263,7 +259,6 @@ def net_SR_for_instrument_in_system( minimum_instrument_weight_idm: float, instrument_weight_idm: float, ): - if instrument_weight_idm == 0: return 0.0 diff --git a/systems/rawdata.py b/systems/rawdata.py index 8714559efd..c0786d87f3 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -345,7 +345,6 @@ def _aggregate_daily_vol_normalised_returns_for_list_of_instruments( def _daily_vol_normalised_price_for_list_of_instruments( self, list_of_instruments: list ) -> pd.Series: - norm_returns = ( self._aggregate_daily_vol_normalised_returns_for_list_of_instruments( list_of_instruments diff --git a/systems/risk_overlay.py b/systems/risk_overlay.py index dba7fb351d..cae7eba2aa 100644 --- a/systems/risk_overlay.py +++ b/systems/risk_overlay.py @@ -76,7 +76,6 @@ def get_risk_multiplier( def multiplier_given_series_and_limit( risk_measure: pd.Series, risk_limit: float ) -> pd.Series: - limit_as_series = pd.Series( [risk_limit] * len(risk_measure.index), risk_measure.index ) diff --git a/systems/system_cache.py b/systems/system_cache.py index f75b879b65..d30245aef8 100644 --- a/systems/system_cache.py +++ b/systems/system_cache.py @@ -35,7 +35,6 @@ class cacheRef(object): def __init__( self, stage_name, itemname, instrument_code=ALL_KEYNAME, flags="", keyname="" ): - self.stage_name = stage_name self.itemname = itemname self.instrument_code = instrument_code @@ -143,7 +142,6 @@ def can_be_pickled(self): class systemCache(dict): def __init__(self, parent_system): - super().__init__() self._parent = parent_system # so we can access the instrument list self.set_caching_on() diff --git a/systems/tests/test_cache.py b/systems/tests/test_cache.py index 56c6dc3167..9d2ef6c3f3 100644 --- a/systems/tests/test_cache.py +++ b/systems/tests/test_cache.py @@ -65,7 +65,6 @@ def single_instrument_with_keywords(self, instrument_code, variation_name): @unittest.SkipTest class TestCache(unittest.TestCase): def setUp(self): - system = System( [testStage1(), testStage2()], simData(), @@ -85,7 +84,6 @@ def test_stage_input_wrapper(self): self.assertEqual(0, len(self.system.cache.get_items_with_data())) def test_single_instrument_no_keywords(self): - self.system.test_stage1.single_instrument_no_keywords("code") cache_ref = self.system.cache.get_cacherefs_for_stage("test_stage1")[0] self.assertEqual("code", cache_ref.instrument_code) @@ -111,7 +109,6 @@ def test_single_instrument_no_keywords(self): self.assertEqual(0, len(self.system.cache.get_items_with_data())) def test_single_instrument_with_keywords(self): - self.system.test_stage1.single_instrument_with_keywords("code", "keyname") cache_ref = self.system.cache.get_cacherefs_for_stage("test_stage1")[0] self.assertEqual("code", cache_ref.instrument_code) @@ -175,7 +172,6 @@ def test_pickling(self): self.assertEqual(4, len(cache_refs)) def test_protection_and_deletion_across(self): - # one protected, one unprotected ans = self.system.test_stage1.single_instrument_no_keywords("code") ans2 = self.system.test_stage1.single_instrument_protected("code") diff --git a/systems/tests/test_forecast_combine.py b/systems/tests/test_forecast_combine.py index 383dab91c0..a1e1f4021d 100644 --- a/systems/tests/test_forecast_combine.py +++ b/systems/tests/test_forecast_combine.py @@ -9,7 +9,6 @@ @unittest.SkipTest class Test(unittest.TestCase): def setUp(self): - ( fcs, rules, @@ -63,7 +62,6 @@ def test_get_combined_threshold_forecsat(self): # max 30.000000 def test_get_capped_forecast(self): - self.assertAlmostEqual( self.system.combForecast.get_capped_forecast("SOFR", "ewmac8") .tail(1) @@ -75,7 +73,6 @@ def test_get_forecast_cap(self): self.assertEqual(self.system.combForecast.get_forecast_cap(), 21.0) def test_get_trading_rule_list(self): - # fixed weights ans = self.system.combForecast.get_trading_rule_list("SOFR") self.assertEqual(ans, ["ewmac16", "ewmac8"]) @@ -145,7 +142,6 @@ def test_get_trading_rule_list(self): self.assertEqual(ans8, ["ewmac16", "ewmac8"]) # missing def test_has_same_rules_as_code(self): - ans = self.system.combForecast.has_same_rules_as_code("SOFR") self.assertEqual(ans, ["SOFR", "US10"]) @@ -153,7 +149,6 @@ def test_has_same_rules_as_code(self): self.assertEqual(ans2, ["BUND"]) def test_get_all_forecasts(self): - ans = self.system.combForecast.get_all_forecasts("SOFR") self.assertAlmostEqual(ans.ewmac16.values[-1], 3.6062425) @@ -161,7 +156,6 @@ def test_get_all_forecasts(self): self.assertAlmostEqual(ans2.ewmac8.values[-1], -0.276206423) def test_get_raw_fixed_forecast_weights(self): - # fixed weights: # nested dict (in config) ans1a = self.system.combForecast.get_forecast_weights("SOFR") diff --git a/systems/tests/test_forecast_scale_cap.py b/systems/tests/test_forecast_scale_cap.py index 3fa0facb61..8be0bb3052 100644 --- a/systems/tests/test_forecast_scale_cap.py +++ b/systems/tests/test_forecast_scale_cap.py @@ -23,7 +23,6 @@ def test_get_raw_forecast(self): self.assertAlmostEqual(ans.values[0], 0.164383, places=6) def test_get_forecast_cap(self): - ans = self.system.forecastScaleCap.get_forecast_cap() self.assertEqual(ans, 21.0) @@ -94,7 +93,6 @@ def test_get_forecast_scalar(self): @unittest.SkipTest def test_get_scaled_forecast(self): - self.assertAlmostEqual( self.system.forecastScaleCap.get_scaled_forecast("EDOLLAR", "ewmac8") .tail(1) @@ -104,7 +102,6 @@ def test_get_scaled_forecast(self): @unittest.SkipTest def test_get_capped_forecast(self): - # fixed, normal cap self.assertAlmostEqual( self.system.forecastScaleCap.get_capped_forecast("EDOLLAR", "ewmac8") diff --git a/systems/tests/test_forecasts.py b/systems/tests/test_forecasts.py index c7ddf399e1..9ba87f983b 100644 --- a/systems/tests/test_forecasts.py +++ b/systems/tests/test_forecasts.py @@ -20,7 +20,6 @@ class Test(unittest.TestCase): @unittest.SkipTest def testRules(self): - # config=Config(dict(trading_rules=dict(ewmac=dict(function="systems.provided.rules.ewmac.ewmac_forecast_with_defaults")))) NOTUSEDrawdata, data, NOTUSEDconfig = get_test_object() @@ -141,7 +140,6 @@ def testinitTradingRules(self): @unittest.SkipTest def testCallingTradingRule(self): - # config=Config(dict(trading_rules=dict(ewmac=dict(function="systems.provided.rules.ewmac..ewmac_forecast_with_defaults")))) NOTUSEDrawdata, data, NOTUSEDconfig = get_test_object() @@ -194,7 +192,6 @@ def testCarryRule(self): self.assertAlmostEqual(ans.tail(1).values[0], 0.138302, 5) def testProcessTradingRuleSpec(self): - ruleA = TradingRule(ewmac_forecast_with_defaults) ruleB = TradingRule( dict( diff --git a/systems/tests/test_portfolio.py b/systems/tests/test_portfolio.py index f3f52dedd9..e3b84a4385 100644 --- a/systems/tests/test_portfolio.py +++ b/systems/tests/test_portfolio.py @@ -9,7 +9,6 @@ class Test(unittest.TestCase): def setUp(self): - ( posobject, combobject, diff --git a/systems/tests/test_rawdata.py b/systems/tests/test_rawdata.py index f6adf8d489..fe7cf6974a 100644 --- a/systems/tests/test_rawdata.py +++ b/systems/tests/test_rawdata.py @@ -5,7 +5,6 @@ class Test(unittest.TestCase): def setUp(self): - (rawdata, data, config) = get_test_object() system = System([rawdata], data) diff --git a/systems/tests/testfuturesrawdata.py b/systems/tests/testfuturesrawdata.py index 6b15bad219..04825a3ad0 100644 --- a/systems/tests/testfuturesrawdata.py +++ b/systems/tests/testfuturesrawdata.py @@ -18,7 +18,6 @@ def get_test_object_futures(): class Test(unittest.TestCase): def setUp(self): - (rawdata, data, config) = get_test_object_futures() system = System([rawdata], data, config) diff --git a/systems/tools/autogroup.py b/systems/tools/autogroup.py index 1c61402586..64bb7ab475 100644 --- a/systems/tools/autogroup.py +++ b/systems/tools/autogroup.py @@ -39,7 +39,6 @@ class autoGroupPortfolioWeight(dict): def __init__( self, auto_group_weights: dict, auto_group_parameters: dict = arg_not_supplied ): - copy_auto_group_weights = copy(auto_group_weights) group_weight = copy_auto_group_weights.pop(WEIGHT_FLAG, 1.0) auto_group_weights_without_weight_entry = copy_auto_group_weights diff --git a/systems/trading_rules.py b/systems/trading_rules.py index 1c9b063121..f920ab6be2 100644 --- a/systems/trading_rules.py +++ b/systems/trading_rules.py @@ -140,7 +140,6 @@ def _get_data_from_system(self, system: "System", instrument_code: str): def _get_data_methods_from_list_of_data_string( self, list_of_data_str_references: list, system ) -> list: - # Turn a list of strings into a list of function objects list_of_data_methods = [ resolve_data_method(system, data_string) @@ -155,7 +154,6 @@ def _get_data_from_list_of_methods_and_arguments( list_of_data_methods: list, list_of_args_to_pass_to_data_calls: list, ) -> list: - # Call the functions, providing additional data if neccesssary list_of_data_for_call = [ data_method(instrument_code, **data_arguments) @@ -200,7 +198,6 @@ def __init__( other_args: dict, data_args: list = arg_not_supplied, ): - rule_function, data, other_args, data_args = self._process_inputs( rule_function=rule_function, data=data, @@ -221,7 +218,6 @@ def _process_inputs( other_args: dict, data_args: list = arg_not_supplied, ): - # turn string into a callable function if required self._rule_function = resolve_function(rule_function) @@ -260,7 +256,6 @@ def other_args(self) -> dict: return self._other_args def _check_values(self): - assert isinstance(self.data, list) assert isinstance(self.data_args, list) assert isinstance(self.other_args, dict) @@ -270,7 +265,6 @@ def _check_values(self): def _get_trading_rule_components_depending_on_rule_input( rule, data: list, other_args: dict ) -> tradingRuleComponents: - if data is arg_not_supplied: data = [] @@ -305,7 +299,6 @@ def _already_a_trading_rule(rule): def _create_rule_from_existing_rule( rule, data: list, other_args: dict ) -> tradingRuleComponents: - _throw_warning_if_passed_rule_and_data( "tradingRule", data=data, other_args=other_args ) @@ -328,7 +321,6 @@ def _throw_warning_if_passed_rule_and_data( def _create_rule_from_tuple(rule, data: list, other_args: dict): - _throw_warning_if_passed_rule_and_data("tuple", data=data, other_args=other_args) if len(rule) != 3: @@ -345,7 +337,6 @@ def _create_rule_from_tuple(rule, data: list, other_args: dict): def _create_rule_from_dict(rule, data: list, other_args: dict) -> tradingRuleComponents: - _throw_warning_if_passed_rule_and_data("dict", data=data, other_args=other_args) try: @@ -368,7 +359,6 @@ def _create_rule_from_dict(rule, data: list, other_args: dict) -> tradingRuleCom def _create_rule_from_passed_elements( rule, data: list, other_args: dict ) -> tradingRuleComponents: - rule_components = tradingRuleComponents( rule_function=rule, data=data, other_args=other_args ) @@ -486,7 +476,6 @@ def create_variations( """ if key_argname is None: - if all([len(args_dict) == 1 for args_dict in list_of_args_dict]): # okay to use argname as only seems to be one of them key_argname = list_of_args_dict[0].keys()[0] diff --git a/tests/test_examples.py b/tests/test_examples.py index 290c5862c6..6ab6d08746 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -92,7 +92,6 @@ class TestExamples: """ def test_simple_system_rules(self, data, raw_data): - my_rules = Rules(ewmac) print(my_rules.trading_rules()) @@ -104,7 +103,6 @@ def test_simple_system_rules(self, data, raw_data): print(my_system.rules.get_raw_forecast("EDOLLAR", "ewmac").tail(5)) def test_simple_system_trading_rule(self, data, raw_data, ewmac_8, ewmac_32): - ewmac_rule = TradingRule(ewmac) print(ewmac_rule) @@ -117,7 +115,6 @@ def test_simple_system_trading_rule(self, data, raw_data, ewmac_8, ewmac_32): def test_simple_system_trading_rules_estimated( self, data, raw_data, ewmac_8, ewmac_32, fcs ): - my_rules = Rules(dict(ewmac8=ewmac_8, ewmac32=ewmac_32)) my_config = Config() print(my_config) @@ -138,7 +135,6 @@ def test_simple_system_trading_rules_estimated( ) def test_simple_system_trading_rules_fixed(self, data, my_rules, fcs): - # or we can use the values from the book my_config = Config() my_config.trading_rules = dict(ewmac8=ewmac_8, ewmac32=ewmac_32) @@ -154,7 +150,6 @@ def test_simple_system_trading_rules_fixed(self, data, my_rules, fcs): def test_simple_system_combing_rules( self, data, raw_data, my_rules, my_config, fcs ): - # defaults combiner = ForecastCombine() my_system = System([fcs, my_rules, combiner, raw_data], data, my_config) @@ -169,7 +164,6 @@ def test_simple_system_combing_rules( def test_simple_system_combining_and_estimating( self, data, raw_data, my_rules, my_config, fcs, combiner, possizer, account ): - # estimates: my_config.forecast_weight_estimate = dict(method="one_period") my_config.use_forecast_weight_estimates = True @@ -187,7 +181,6 @@ def test_simple_system_combining_and_estimating( ) def test_simple_system_combining_fixed(self, data, raw_data, my_config, fcs): - # fixed: my_config.forecast_weights = dict(ewmac8=0.5, ewmac32=0.5) my_config.forecast_div_multiplier = 1.1 @@ -204,7 +197,6 @@ def test_simple_system_combining_fixed(self, data, raw_data, my_config, fcs): def test_simple_system_position_sizing( self, data, raw_data, my_rules, my_config, fcs, combiner, possizer ): - # size positions my_config.percentage_vol_target = 25 my_config.notional_trading_capital = 500000 @@ -230,7 +222,6 @@ def test_simple_system_position_sizing( def test_simple_system_portfolio_estimated( self, data, raw_data, my_rules, my_config, fcs, combiner, possizer, account ): - # portfolio - estimated portfolio = Portfolios() @@ -252,7 +243,6 @@ def test_simple_system_portfolio_estimated( def test_simple_system_portfolio_fixed( self, data, raw_data, my_rules, my_config, fcs, combiner, possizer, portfolio ): - # or fixed my_config.use_instrument_weight_estimates = False my_config.use_instrument_div_mult_estimates = False @@ -279,7 +269,6 @@ def test_simple_system_costs( portfolio, account, ): - my_config.forecast_weights = dict(ewmac8=0.5, ewmac32=0.5) my_config.instrument_weights = dict(US10=0.1, EDOLLAR=0.4, CORN=0.3, SP500=0.2) @@ -296,7 +285,6 @@ def test_simple_system_costs( print(profits.net.percent.stats()) def test_simple_system_config_object(self, data, ewmac_8, ewmac_32): - my_config = Config( dict( trading_rules=dict(ewmac8=ewmac_8, ewmac32=ewmac_32), @@ -333,7 +321,6 @@ def test_simple_system_config_object(self, data, ewmac_8, ewmac_32): @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_simple_system_risk_overlay(self, data, ewmac_8, ewmac_32): - my_config = Config( dict( trading_rules=dict(ewmac8=ewmac_8, ewmac32=ewmac_32), @@ -375,7 +362,6 @@ def test_simple_system_risk_overlay(self, data, ewmac_8, ewmac_32): print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) def test_simple_system_config_import(self, data): - my_config = Config("systems.provided.example.simplesystemconfig.yaml") my_config.exclude_instrument_lists = dict( ignore_instruments=["MILK"], diff --git a/tox.ini b/tox.ini index f3ee7cf354..e1f2582627 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{35,36} +envlist = py{310} [testenv] commands = From 9a43904ba24a026dd5ad3f03cb40c440c1952d8f Mon Sep 17 00:00:00 2001 From: Robert Carver Date: Tue, 21 Nov 2023 09:05:35 +0000 Subject: [PATCH 130/235] Update backup_parquet_data_to_remote.py --- sysproduction/backup_parquet_data_to_remote.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/sysproduction/backup_parquet_data_to_remote.py b/sysproduction/backup_parquet_data_to_remote.py index c8481f5bd4..7079ceb9b4 100644 --- a/sysproduction/backup_parquet_data_to_remote.py +++ b/sysproduction/backup_parquet_data_to_remote.py @@ -19,8 +19,7 @@ def backup_parquet_data_to_remote(): -def get_parquet_directory(): - data = dataBlob() +def get_parquet_directory(data): return data.parquet_root_directory @@ -28,16 +27,19 @@ class backupParquet(object): def __init__(self, data): self.data = data - def backup_parquet_data_to_remote(self): + def backup_parquet(self): data = self.data log = data.log log.debug("Copying data to backup destination") - backup_parquet_data(data) - + backup_parquet_data_to_remote_with_data(data) +def backup_parquet_data_to_remote(): + ## if called as standalone script + data = dataBlob() + backup_parquet_data_to_remote_with_data(data) -def backup_parquet_data(data): - source_path = get_parquet_directory() +def backup_parquet_data_to_remote_with_data(data): + source_path = get_parquet_directory(data) destination_path = get_parquet_backup_directory() data.log.debug("Copy from %s to %s" % (source_path, destination_path)) os.system("rsync -av %s %s" % (source_path, destination_path)) From 3c7b974f3bac291b6c18f0a28f9ef23a09e5bdc2 Mon Sep 17 00:00:00 2001 From: Robert Carver Date: Tue, 21 Nov 2023 09:08:39 +0000 Subject: [PATCH 131/235] Update backup_parquet_data_to_remote.py --- sysproduction/backup_parquet_data_to_remote.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/sysproduction/backup_parquet_data_to_remote.py b/sysproduction/backup_parquet_data_to_remote.py index 7079ceb9b4..1018d1ce6d 100644 --- a/sysproduction/backup_parquet_data_to_remote.py +++ b/sysproduction/backup_parquet_data_to_remote.py @@ -13,7 +13,7 @@ def backup_parquet_data_to_remote(): data = dataBlob(log_name="backup_mongo_data_as_dump") backup_object = backupParquet(data) - backup_object.backup_parquet_data_to_remote() + backup_object.backup_parquet() return None @@ -33,11 +33,6 @@ def backup_parquet(self): log.debug("Copying data to backup destination") backup_parquet_data_to_remote_with_data(data) -def backup_parquet_data_to_remote(): - ## if called as standalone script - data = dataBlob() - backup_parquet_data_to_remote_with_data(data) - def backup_parquet_data_to_remote_with_data(data): source_path = get_parquet_directory(data) destination_path = get_parquet_backup_directory() From 70ec9a536b90b026fd2cc12446f0062f3f5e6c1d Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 21 Nov 2023 16:17:08 +0000 Subject: [PATCH 132/235] fix weird IB date string --- sysbrokers/IB/client/ib_contracts_client.py | 1 + systems/accounts/pandl_calculators/pandl_calculation.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index 1ab12fa74b..c8651c5979 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -88,6 +88,7 @@ def broker_get_single_contract_expiry_date( raise missingContract expiry_date = ibcontract.lastTradeDateOrContractMonth + expiry_date = expiry_date[:8] ## in case of weird '... GB format' return expiry_date diff --git a/systems/accounts/pandl_calculators/pandl_calculation.py b/systems/accounts/pandl_calculators/pandl_calculation.py index 0cd00c2e22..c21099b0ea 100644 --- a/systems/accounts/pandl_calculators/pandl_calculation.py +++ b/systems/accounts/pandl_calculators/pandl_calculation.py @@ -67,6 +67,9 @@ def as_pd_series_for_frequency( ) -> pd.Series: as_pd_series = self.as_pd_series(**kwargs) + ## FIXME: Ugly to get pandas 2.x working + as_pd_series.index = pd.to_datetime(as_pd_series.index) + resample_freq = from_config_frequency_pandas_resample(frequency) pd_series_at_frequency = as_pd_series.resample(resample_freq).sum() From f6b2e1ba6c19aa7b87f9b580402f18432eb0f6d8 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 21 Nov 2023 17:09:34 +0000 Subject: [PATCH 133/235] fix future warning --- sysdata/production/historic_contract_positions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/production/historic_contract_positions.py b/sysdata/production/historic_contract_positions.py index cfe628cd63..fdbc374500 100644 --- a/sysdata/production/historic_contract_positions.py +++ b/sysdata/production/historic_contract_positions.py @@ -34,7 +34,7 @@ def get_current_position_for_contract_object( if len(position_series) == 0: return 0.0 - return position_series[-1] + return position_series.iloc[-1] def update_position_for_contract_object( self, From 04d94ed096ea1d4a5cf5925706d43fa326e42d85 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 21 Nov 2023 17:15:14 +0000 Subject: [PATCH 134/235] fix future warning --- sysdata/production/historic_strategy_positions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/production/historic_strategy_positions.py b/sysdata/production/historic_strategy_positions.py index 6337ed2fd9..036b2f2886 100644 --- a/sysdata/production/historic_strategy_positions.py +++ b/sysdata/production/historic_strategy_positions.py @@ -41,7 +41,7 @@ def get_current_position_for_instrument_strategy_object( if len(position_series) == 0: return 0 - return position_series[-1] + return position_series.iloc[-1] def update_position_for_instrument_strategy_object( self, From f80f20222af3d76dac233714e386eaf6539f9c52 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 21 Nov 2023 19:03:38 +0000 Subject: [PATCH 135/235] fixed .append issue --- sysdata/production/historic_contract_positions.py | 2 +- sysdata/production/historic_strategy_positions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysdata/production/historic_contract_positions.py b/sysdata/production/historic_contract_positions.py index fdbc374500..2315e9ea8e 100644 --- a/sysdata/production/historic_contract_positions.py +++ b/sysdata/production/historic_contract_positions.py @@ -193,7 +193,7 @@ def _update_position_for_contract_object_with_date_and_existing_data( self.log.critical(error_msg) raise Exception(error_msg) - updated_series = current_series.append(new_position_series) + updated_series = current_series._append(new_position_series) self._write_updated_position_series_for_contract_object( contract_object=contract_object, updated_series=updated_series ) diff --git a/sysdata/production/historic_strategy_positions.py b/sysdata/production/historic_strategy_positions.py index 036b2f2886..bffee3b8e7 100644 --- a/sysdata/production/historic_strategy_positions.py +++ b/sysdata/production/historic_strategy_positions.py @@ -221,7 +221,7 @@ def _update_position_for_instrument_strategy_object_with_date_and_existing_data( self.log.critical(error_msg) raise Exception(error_msg) - updated_series = current_series.append(new_position_series) + updated_series = current_series._append(new_position_series) self._write_updated_position_series_for_instrument_strategy_object( instrument_strategy=instrument_strategy, updated_series=updated_series ) From 0c8faa165f556450eabb9df3d5c468bddff68217 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 21 Nov 2023 19:32:20 +0000 Subject: [PATCH 136/235] supress warning fix bfill error --- syscore/pandas/full_merge_with_replacement.py | 2 +- sysobjects/dict_of_named_futures_per_contract_prices.py | 2 +- sysobjects/futures_per_contract_prices.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/syscore/pandas/full_merge_with_replacement.py b/syscore/pandas/full_merge_with_replacement.py index df1ce8db84..64fdcd440d 100644 --- a/syscore/pandas/full_merge_with_replacement.py +++ b/syscore/pandas/full_merge_with_replacement.py @@ -190,7 +190,7 @@ def full_merge_of_existing_series( # fill to the left # NA from the original series will be preserved - joint_data_filled_across = joint_data.bfill(1) + joint_data_filled_across = joint_data.bfill(axis=1) merged_data = joint_data_filled_across["original"] else: # update older data with non-NA values from new data series diff --git a/sysobjects/dict_of_named_futures_per_contract_prices.py b/sysobjects/dict_of_named_futures_per_contract_prices.py index b4c83acd0e..74f637ef8b 100644 --- a/sysobjects/dict_of_named_futures_per_contract_prices.py +++ b/sysobjects/dict_of_named_futures_per_contract_prices.py @@ -132,7 +132,7 @@ def final_contract(self) -> str: """ contract_ids = self.ts_of_contract_str - return contract_ids[-1] + return contract_ids.iloc[-1] def check_all_contracts_equal_to(self, test_contractid: str) -> bool: """ diff --git a/sysobjects/futures_per_contract_prices.py b/sysobjects/futures_per_contract_prices.py index e86fdfeb9d..aaa136e3c8 100644 --- a/sysobjects/futures_per_contract_prices.py +++ b/sysobjects/futures_per_contract_prices.py @@ -1,3 +1,4 @@ +import warnings import pandas as pd import datetime from copy import copy @@ -32,7 +33,9 @@ def __init__(self, price_data_as_df: pd.DataFrame): price_data_as_df.index.name = "index" # for arctic compatibility super().__init__(price_data_as_df) - self._as_df = price_data_as_df + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message="UserWarning: Pandas doesn't allow columns to be created via a new attribute name - see https://pandas.pydata.org/pandas-docs/stable/indexing.html#attribute-access") + self._as_df = price_data_as_df def __copy__(self): return futuresContractPrices(copy(self._as_df)) From 8b965f9f715f7be3b6677c09238ab3d87c0d4aca Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:10:46 +0000 Subject: [PATCH 137/235] fixed append in margn --- sysbrokers/IB/client/ib_contracts_client.py | 2 +- sysdata/production/margin.py | 2 +- sysobjects/futures_per_contract_prices.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sysbrokers/IB/client/ib_contracts_client.py b/sysbrokers/IB/client/ib_contracts_client.py index c8651c5979..2e1b8641e2 100644 --- a/sysbrokers/IB/client/ib_contracts_client.py +++ b/sysbrokers/IB/client/ib_contracts_client.py @@ -88,7 +88,7 @@ def broker_get_single_contract_expiry_date( raise missingContract expiry_date = ibcontract.lastTradeDateOrContractMonth - expiry_date = expiry_date[:8] ## in case of weird '... GB format' + expiry_date = expiry_date[:8] ## in case of weird '... GB format' return expiry_date diff --git a/sysdata/production/margin.py b/sysdata/production/margin.py index daee162e24..28b85d43aa 100644 --- a/sysdata/production/margin.py +++ b/sysdata/production/margin.py @@ -13,7 +13,7 @@ def final_value(self) -> float: return self.values[-1] def add_value(self, value: float, dateref=datetime.datetime.now()): - return seriesOfMargin(self.append(pd.Series([value], index=[dateref]))) + return seriesOfMargin(self._append(pd.Series([value], index=[dateref]))) class marginData(object): diff --git a/sysobjects/futures_per_contract_prices.py b/sysobjects/futures_per_contract_prices.py index aaa136e3c8..91b86d6904 100644 --- a/sysobjects/futures_per_contract_prices.py +++ b/sysobjects/futures_per_contract_prices.py @@ -34,7 +34,7 @@ def __init__(self, price_data_as_df: pd.DataFrame): super().__init__(price_data_as_df) with warnings.catch_warnings(): - warnings.filterwarnings("ignore", message="UserWarning: Pandas doesn't allow columns to be created via a new attribute name - see https://pandas.pydata.org/pandas-docs/stable/indexing.html#attribute-access") + warnings.filterwarnings("ignore", UserWarning) self._as_df = price_data_as_df def __copy__(self): From 1cd8b7474c1d0d5ade584eb190cc655784f4c30b Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:16:28 +0000 Subject: [PATCH 138/235] now correct warning syntax --- sysobjects/futures_per_contract_prices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysobjects/futures_per_contract_prices.py b/sysobjects/futures_per_contract_prices.py index 91b86d6904..d20024241c 100644 --- a/sysobjects/futures_per_contract_prices.py +++ b/sysobjects/futures_per_contract_prices.py @@ -34,7 +34,7 @@ def __init__(self, price_data_as_df: pd.DataFrame): super().__init__(price_data_as_df) with warnings.catch_warnings(): - warnings.filterwarnings("ignore", UserWarning) + warnings.filterwarnings("ignore", category=UserWarning) self._as_df = price_data_as_df def __copy__(self): From fe87e20b1bd340d5c41f56c121d1344899b23e54 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:23:08 +0000 Subject: [PATCH 139/235] fixed backup issue --- sysproduction/backup_db_to_csv.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/backup_db_to_csv.py b/sysproduction/backup_db_to_csv.py index cc13f8c13d..7367c08ca7 100644 --- a/sysproduction/backup_db_to_csv.py +++ b/sysproduction/backup_db_to_csv.py @@ -132,7 +132,7 @@ def get_data_and_create_csv_directories(logname): get_class_for_data_type(FUTURES_CONTRACT_PRICE_DATA), get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), get_class_for_data_type(FX_DATA), - get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(STORED_SPREAD_DATA), get_class_for_data_type(BROKER_HISTORIC_ORDERS_DATA), get_class_for_data_type(CONTRACT_HISTORIC_ORDERS_DATA), get_class_for_data_type(STRATEGY_HISTORIC_ORDERS_DATA), @@ -141,7 +141,7 @@ def get_data_and_create_csv_directories(logname): get_class_for_data_type(FUTURES_CONTRACT_DATA), get_class_for_data_type(OPTIMAL_POSITION_DATA), get_class_for_data_type(ROLL_STATE_DATA), - get_class_for_data_type(SPREAD_DATA), + get_class_for_data_type(HISTORIC_SPREAD_DATA), ], use_prefix="db", ) From 3c8304435c3db6de74c2cb634029061980c1b037 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:31:22 +0000 Subject: [PATCH 140/235] slippage reporting append error --- sysproduction/reporting/data/trades.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/reporting/data/trades.py b/sysproduction/reporting/data/trades.py index d48309bf85..ab880e5409 100644 --- a/sysproduction/reporting/data/trades.py +++ b/sysproduction/reporting/data/trades.py @@ -123,7 +123,7 @@ def raw_slippage_row(order_row): ) = price_calculations_for_order_row(order_row) new_order_row = copy(order_row) new_order_row = new_order_row[NEW_ORDER_ROW_COLS] - new_order_row = new_order_row.append( + new_order_row = new_order_row._append( pd.Series( [ delay, From 9b81968746f233b05d4f1e0dac379824edc59458 Mon Sep 17 00:00:00 2001 From: robcarver17 Date: Wed, 22 Nov 2023 09:43:35 +0000 Subject: [PATCH 141/235] update spread costs --- data/futures/csvconfig/spreadcosts.csv | 778 ++++++++++++------------- 1 file changed, 389 insertions(+), 389 deletions(-) diff --git a/data/futures/csvconfig/spreadcosts.csv b/data/futures/csvconfig/spreadcosts.csv index 94acff1915..9e9c4d7a11 100644 --- a/data/futures/csvconfig/spreadcosts.csv +++ b/data/futures/csvconfig/spreadcosts.csv @@ -2,144 +2,144 @@ Instrument,SpreadCost AEX,0.063 AEX_mini,2.4 ALUMINIUM,1.8 -ALUMINIUM_LME,0 -AMERIBOR-1M,0 -AMERIBOR-3M,0 -AMERIBOR-T30,0 -AMX,0 -ASX,0 -ATX,0 -AUD,3.3E-05 -AUD-ICE,0 +ALUMINIUM_LME,1.0 +AMERIBOR-1M,0.0 +AMERIBOR-3M,0.0 +AMERIBOR-T30,0.0 +AMX,0.0 +ASX,0.0 +ATX,0.0 +AUD,3.3e-05 +AUD-ICE,0.0 AUD-SGX,0.00015 -AUDCAD,0 +AUDCAD,0.0 AUDJPY,0.015 AUDJPY-SGX,0.015 -AUD_micro,9.1E-05 +AUD_micro,5.9999999999999995e-05 AUSCASH,0.005 -BARLEY,0 -BB3M,0.0069 +BARLEY,0.0 +BB3M,0.017 BBCOMM,0.1 -BEL20,9 -BITCOIN,20 -BITCOIN-BAKKT,0 +BEL20,9.0 +BITCOIN,9.4 +BITCOIN-BAKKT,0.0 BOBL,0.0051 BONO,0.07 -BONO-MEFF,0 -BOVESPA,110 -BOVESPA-HK,0 -BRE,3.5E-05 -BRENT,0 -BRENT-LAST,0.018 -BRENT_W,0 -BRR,0 -BRREUR,0 +BONO-MEFF,0.0 +BOVESPA,190.0 +BOVESPA-HK,0.0 +BRE,3.5e-05 +BRENT,0.0 +BRENT-LAST,0.018000000000000002 +BRENT_W,0.01 +BRR,0.0 +BRREUR,0.0 BTP,0.0063 BTP3,0.0051 -BTP5,0 +BTP5,0.0 BUND,0.0052 -BUTTER,1.5 +BUTTER,1.0 BUXL,0.015 CAC,0.44 -CAD,3E-05 +CAD,2.9999999999999997e-05 CAD10,0.0056 CAD2,0.0025 CAD5,0.0054 -CADJPY,0 -CADJPY2,0 +CADJPY,0.0 +CADJPY2,0.0 CADSTIR,0.0041 -CAD_micro,5.1E-05 -CAN-ENERGY,0 -CAN-FINANCE,0 -CAN-GOLD,0 -CAN-TECH,0 -CANNABIS_small,0 -CANOLA,2 +CAD_micro,5.1e-05 +CAN-ENERGY,0.0 +CAN-FINANCE,0.0 +CAN-GOLD,0.0 +CAN-TECH,0.0 +CANNABIS_small,0.0 +CANOLA,0.95 CH10,0.25 -CHEESE,0.0059 -CHF,5.8E-05 -CHFJPY,0.022 -CHFJPY-ICE,0.022 +CHEESE,0.0031 +CHF,5.7999999999999994e-05 +CHFJPY,0.022000000000000002 +CHFJPY-ICE,0.022000000000000002 CHF_micro,0.00011 -CHINA120,0 -CHINAA-CON,0 -CLP,1.6E-05 -CNH,0.00023 -CNH-CME,0 -CNH-CME_micro,0 -CNH-HK,0 +CHINA120,0.0 +CHINAA-CON,0.2 +CLP,3.6e-06 +CNH,0.00022999999999999998 +CNH-CME,0.0 +CNH-CME_micro,0.0 +CNH-HK,0.0 CNH-onshore,0.00015 -CNHEUR,0 -COAL,0 -COAL-GEORDIE,0 -COAL-RICH-BAY,0 -COCOA,0.5 -COCOA_LDN,0 +CNHEUR,0.0 +COAL,1.6 +COAL-GEORDIE,4.5 +COAL-RICH-BAY,0.0 +COCOA,0.77 +COCOA_LDN,1.0 COFFEE,0.1 COPPER,0.00043 COPPER-micro,0.0007 -COPPER-mini,0.0035 -COPPER_LME,0 +COPPER-mini,0.0017000000000000001 +COPPER_LME,1.5 CORN,0.16 CORN-EURO,0.12 -CORN-JPN,0 +CORN-JPN,0.0 CORN_mini,0.2 CORRA,0.008 -COTTON,0.75 -COTTON2,0.078 -CRUDE_ICE,0 +COTTON,0.25 +COTTON2,0.044 +CRUDE_ICE,0.015 CRUDE_W,0.012 CRUDE_W_micro,0.017 CRUDE_W_mini,0.03 -CRYPTO_small,0 -CZK,9.4E-05 -DAX,1.4 +CRYPTO_small,0.0 +CZK,9.4e-05 +DAX,0.87 DIVDAX,0.175 -DIVDAX-DIVI,0 -DIVDAX-DIVI2,0 +DIVDAX-DIVI,0.0 +DIVDAX-DIVI2,0.0 DJSTX-SMALL,0.21 -DJUBS,0 +DJUBS,0.0 DOW,0.82 -DOW_YEN,0 +DOW_YEN,0.0 DOW_mini,0.75 -DX,0.1 +DX,0.043 EDOLLAR,0.0025 -EPRA-EURO,0 +EPRA-EURO,0.0 EPRA-EUROPE,3.2 -ETHANOL,1 +ETHANOL,0.38 ETHER-micro,1.8 -ETHEREUM,1 -ETHRR,0 -ETHRREUR,0 +ETHEREUM,0.68 +ETHRR,0.0 +ETHRREUR,0.0 EU-AUTO,0.22 EU-BANKS,0.029 EU-BANKS-DIVI,0.12 -EU-BANKS2,0 +EU-BANKS2,0.0 EU-BASIC,0.26 EU-CHEM,0.4 EU-CONSTRUCTION,0.23 EU-DIV30,0.68 -EU-DIV30-DVP,0 +EU-DIV30-DVP,0.0 EU-DIV50,0.1 -EU-DJ-AUTO,0 -EU-DJ-BASIC,0 +EU-DJ-AUTO,0.0 +EU-DJ-BASIC,0.0 EU-DJ-CHEM,15.55 -EU-DJ-CONSTRUCTION,0 +EU-DJ-CONSTRUCTION,0.0 EU-DJ-FINANCE,6.25 EU-DJ-FOOD,1.55 EU-DJ-HEALTH,4.4 -EU-DJ-HOUSE,0 +EU-DJ-HOUSE,0.0 EU-DJ-INDUSTRY,0.6 -EU-DJ-INDUSTRY2,0 +EU-DJ-INDUSTRY2,0.0 EU-DJ-INSURE,0.35 EU-DJ-MEDIA,2.85 EU-DJ-OIL,0.26 EU-DJ-RETAIL,2.1 EU-DJ-TECH,0.35 EU-DJ-TELECOM,0.15 -EU-DJ-TRAVEL,0 +EU-DJ-TRAVEL,0.0 EU-DJ-UTIL,0.18 -EU-ESG,0 +EU-ESG,0.0 EU-FINANCE,0.2 EU-FOOD,0.25 EU-HEALTH,0.32 @@ -151,110 +151,110 @@ EU-OIL,0.14 EU-REALESTATE,0.1 EU-RETAIL,0.15 EU-TECH,0.27 -EU-TELECOM,0 +EU-TELECOM,0.0 EU-TRAVEL,0.2 EU-UTILS,0.16 -EUA,0.25 -EUIRS10,0 -EUIRS2,0 -EUIRS5,0 -EUR,2.9E-05 -EUR-ICE,0 +EUA,0.088 +EUIRS10,0.0 +EUIRS2,0.0 +EUIRS5,0.0 +EUR,2.8999999999999997e-05 +EUR-ICE,0.0 EURAUD,0.00018 -EURAUD-ICE,0 -EURCAD,0.00017 -EURCAD-ICE,0 +EURAUD-ICE,0.0 +EURCAD,0.00016999999999999999 +EURCAD-ICE,0.0 EURCHF,0.00011 -EURCHF-ICE,0 -EURCZK,0 -EURHUF,0 -EURIBOR,49 -EURIBOR-ICE,0 -EURINR,0 -EURMXP,0 +EURCHF-ICE,0.0 +EURCZK,0.0 +EURHUF,0.0 +EURIBOR,12.0 +EURIBOR-ICE,0.0025 +EURINR,0.0 +EURMXP,0.0 EURO600,0.057 -EURO600-ESG,0 -EUROFIRST100,0 -EUROFIRST80,0 +EURO600-ESG,0.0 +EUROFIRST100,0.0 +EUROFIRST80,0.0 EUROSTX,0.48 -EUROSTX-CORE,0 -EUROSTX-DJ,0 +EUROSTX-CORE,0.0 +EUROSTX-DJ,0.0 EUROSTX-LARGE,0.15 EUROSTX-MID,1.2 EUROSTX-PRICE,0.15 EUROSTX-SMALL,0.16 EUROSTX200-LARGE,0.14 -EUR_micro,9.1E-05 -EUR_mini,9.2E-05 -FANG,0 +EUR_micro,5.9e-05 +EUR_mini,6.1e-05 +FANG,3.8 FED,0.003 FEEDCOW,0.04 -FTSE100,0.1 -FTSE100-DIV,0 -FTSE250,0 -FTSEAFRICA40,0 -FTSECHINAA,1 -FTSECHINAA-CSOP,0 -FTSECHINAA-IS,0 -FTSECHINAH,13 +FTSE100,0.21 +FTSE100-DIV,0.0 +FTSE250,9.5 +FTSEAFRICA40,0.0 +FTSECHINAA,1.0 +FTSECHINAA-CSOP,0.0 +FTSECHINAA-IS,0.0 +FTSECHINAH,13.0 FTSEINDO,5.4 FTSETAIWAN,0.14 -FTSEVIET,8.2 -GAS-LAST,0.0073 -GAS-PEN,0.0052 -GASOIL,0.1 -GASOILINE,0.00073 -GASOILINE_ICE,0 -GASOILINE_micro,0.037 -GAS_NL,0 -GAS_UK,0 -GAS_US,0.0019 +FTSEVIET,12.0 +GAS-LAST,0.0027 +GAS-PEN,0.002 +GASOIL,0.21 +GASOILINE,0.00049 +GASOILINE_ICE,0.0008 +GASOILINE_micro,0.037000000000000005 +GAS_NL,0.0 +GAS_UK,0.0 +GAS_US,0.0011 GAS_US_mini,0.005 -GBP,6.1E-05 -GBP-ICE,0 +GBP,6.1e-05 +GBP-ICE,0.0 GBPCHF,0.00021 -GBPCHF-ICE,0 -GBPEUR,6.4E-05 -GBPEUR-ICE,0 -GBPINR,0 +GBPCHF-ICE,0.0 +GBPEUR,6.4e-05 +GBPEUR-ICE,0.0 +GBPINR,0.0 GBPJPY,0.024 -GBPJPY-ICE,0 -GBP_micro,9E-05 +GBPJPY-ICE,0.0 +GBP_micro,9e-05 GICS,0.56 -GICS-EXCESS,0 -GILT,0 -GILT2,0 -GILT5,0 +GICS-EXCESS,0.0 +GILT,0.005 +GILT2,0.0 +GILT5,0.0 GOLD,0.068 GOLD-CHINA,7.45 -GOLD-CHINA-USD,0 -GOLD-CN-HK,0 -GOLD-HK,0 -GOLD-JPN,0 -GOLD-JPN_mini,0 +GOLD-CHINA-USD,0.0 +GOLD-CN-HK,0.0 +GOLD-HK,0.0 +GOLD-JPN,0.0 +GOLD-JPN_mini,0.0 GOLD-mini,0.24 -GOLD_micro,0.088 -HANG,0 -HANG-DIV,0 -HANGTECH,0 -HANGENT,0 -HANGENT-GTR,0 -HANGENT-NTR,0 -HANGENT_mini,0 -HANG_mini,0 -HEAT-DEG-AMS,0 -HEAT-DEG-LON,0 -HEAT-DEG-NY,0 -HEATOIL,0.00077 -HEATOIL-ICE,0 +GOLD_micro,0.08800000000000001 +HANG,1.5 +HANG-DIV,0.0 +HANG-TECH,0.0 +HANGENT,1.0 +HANGENT-GTR,0.0 +HANGENT-NTR,0.0 +HANGENT_mini,1.5 +HANG_mini,2.0 +HEAT-DEG-AMS,0.0 +HEAT-DEG-LON,0.0 +HEAT-DEG-NY,0.0 +HEATOIL,0.0007700000000000001 +HEATOIL-ICE,0.0011 HEATOIL-mini,0.0325 -HEATOIL_micro,0 -HIBOR,0 -HIGHYIELD,0.02 +HEATOIL_micro,0.0 +HIBOR,0.0 +HIGHYIELD,0.028 HOUSE-BO,3.4 -HOUSE-CG,3 -HOUSE-DC,0 -HOUSE-DN,3 +HOUSE-CG,3.0 +HOUSE-DC,0.0 +HOUSE-DN,3.0 HOUSE-LA,4.3 HOUSE-LV,3.9 HOUSE-MI,4.1 @@ -262,321 +262,321 @@ HOUSE-NY,3.6 HOUSE-SD,2.45 HOUSE-SF,1.95 HOUSE-US,2.4 -HSCEI-DIV,0 -HUF,6.5E-06 -HUFEUR,0 -IBEX_mini,0 -IBXEX,0 -IG,0.02 -IND-BANK,0 -IND-FIN,0 +HSCEI-DIV,0.0 +HUF,6.5000000000000004e-06 +HUFEUR,0.0 +IBEX_mini,2.5 +IBXEX,0.0 +IG,0.029 +IND-BANK,0.0 +IND-FIN,0.0 INR,0.015 INR-SGX,0.01 -INR-SGX1,0 -INR-SGX2,0 +INR-SGX1,0.0 +INR-SGX2,0.0 INR-micro,0.12 -IPC,0 +IPC,0.0 IRON,0.066 -IRON-CME,0 -IRS,0.00068 +IRON-CME,0.0 +IRS,0.0006799999999999999 JGB,0.005 JGB-SGX-mini,0.01 -JGB-mini,0.5 +JGB-mini,0.24 JP-REALESTATE,1.6 -JPY,2.7E-07 +JPY,2.7e-07 JPY-SGX,0.01 JPY-SGX-TITAN,0.01 -JPYINR,0 -JPY_micro,0 -JPY_mini,1.2E-06 +JPYINR,0.0 +JPY_micro,0.0 +JPY_mini,1.2e-06 KOSDAQ,0.15 -KOSPI,0.026 -KOSPI300,0 +KOSPI,0.026000000000000002 +KOSPI300,0.0 KOSPI_mini,0.017 KR10,0.0065 KR3,0.005 KRW,0.00015 -KRWJPY,0 -KRWUSD,1.6E-06 -KRWUSD_mini,0.00017 -LEAD_LME,0 -LEANHOG,0.026 +KRWJPY,0.0 +KRWUSD,1.6000000000000001e-06 +KRWUSD_mini,0.00016999999999999999 +LEAD_LME,1.8 +LEANHOG,0.026000000000000002 LIBOR1,0.01 -LIVECOW,0.018 +LIVECOW,0.018000000000000002 LUMBER,2.1 -LUMBER-new,9.1 -MARS-ARGUS,0 -MIB,0 -MIB-DIVI,0 -MIB_micro,0 -MIB_mini,0 -MID-DAX,23 -MILK,0.051 +LUMBER-new,2.8 +MARS-ARGUS,0.0 +MIB,5.0 +MIB-DIVI,0.0 +MIB_micro,0.0 +MIB_mini,0.0 +MID-DAX,23.0 +MILK,0.025 MILKDRY,1.3 MILKWET,0.19 MILLWHEAT,0.12 MSCIASIA,0.34 -MSCIASIAEXJP,0 -MSCIBRAZIL,0 -MSCICHINA,0 -MSCICHINANET,0 -MSCIEAFA,0 -MSCIEAFA-TOT,0 -MSCIEM,0 -MSCIEM-LIFFE,0 -MSCIEMASIA,0 -MSCIEURONET,0 -MSCIEURONET-ICE,0 -MSCIEUROPE,0 -MSCIEUROPE-ICE,0 -MSCIEUROPE-LIFFE,0 -MSCIINDO,0 -MSCIJAPAN,0 -MSCIJAPAN-LIFFE,0 -MSCILATIN,0 -MSCIPACIFIC,0 -MSCIPANEURO-LIFFE,0 +MSCIASIAEXJP,0.0 +MSCIBRAZIL,0.0 +MSCICHINA,0.0 +MSCICHINANET,0.0 +MSCIEAFA,0.1 +MSCIEAFA-TOT,0.0 +MSCIEM,0.0 +MSCIEM-LIFFE,0.0 +MSCIEMASIA,0.0 +MSCIEURONET,0.0 +MSCIEURONET-ICE,0.0 +MSCIEUROPE,0.0 +MSCIEUROPE-ICE,0.0 +MSCIEUROPE-LIFFE,0.0 +MSCIINDO,0.0 +MSCIJAPAN,0.0 +MSCIJAPAN-LIFFE,0.0 +MSCILATIN,0.0 +MSCIPACIFIC,0.0 +MSCIPANEURO-LIFFE,0.0 MSCIRUSSIA,1.5 -MSCISING,0.036 -MSCITAIWAN,0 -MSCIUSA,0 +MSCISING,0.036000000000000004 +MSCITAIWAN,0.1 +MSCIUSA,0.0 MSCIWORLD,1.1 -MSCIWORLD-MINVOL,0 -MSCIWORLDNET-EUR,0 -MSCIWORLDNET-ICE,0 -MSCIWORLDNET-USD,0 +MSCIWORLD-MINVOL,0.0 +MSCIWORLDNET-EUR,0.0 +MSCIWORLDNET-ICE,0.0 +MSCIWORLDNET-USD,0.0 MUMMY,0.5 -MXP,5.5E-06 +MXP,5.5e-06 NASBIO-mini,1.3 -NASDAQ,0.35 +NASDAQ,0.24 NASDAQ_micro,0.24 -NASDAQ_mini,0 -NICKEL_LME,0 -NIFTY,0.97 -NIFTY-IN,0 +NASDAQ_mini,0.0 +NICKEL_LME,0.0 +NIFTY,3.3 +NIFTY-IN,0.0 NIKKEI,2.5 -NIKKEI-CME,0 -NIKKEI-JPY,2100 -NIKKEI-JPY_mini,0 -NIKKEI-SGX,0 -NIKKEI-SGX-DIV,0 -NIKKEI-SGX-USD,0 -NIKKEI-SGX_mini,0 +NIKKEI-CME,0.0 +NIKKEI-JPY,2100.0 +NIKKEI-JPY_mini,0.0 +NIKKEI-SGX,0.0 +NIKKEI-SGX-DIV,0.0 +NIKKEI-SGX-USD,0.0 +NIKKEI-SGX_mini,0.0 NIKKEI400,3.1 -NIKKEI_large,0 -NOK,3.6E-05 -NZD,4.2E-05 +NIKKEI_large,0.0 +NOK,3.6e-05 +NZD,4.2e-05 OAT,0.0072 -OAT5,0 -OATIES,1.8 +OAT5,0.0 +OATIES,0.98 OJ,1.075 -OMX,51 -OMX-SWE,0 -OMXESG,0 -OMXSB,0 -PALLAD,2.4 -PIPELINE,0 +OMX,25.0 +OMX-SWE,0.0 +OMXESG,0.0 +OMXSB,0.0 +PALLAD,1.4 +PIPELINE,0.0 PLAT,0.23 -PLAT-JPN,0 -PLAT-JPN_mini,0 +PLAT-JPN,0.0 +PLAT-JPN_mini,0.0 PLN,0.00014 PLZEUR,0.0005 -PRECIOUS_small,0 +PRECIOUS_small,0.0 R1000,0.4 R1000GROWTH-mini,0.4 R1000_mini,0.45 RAPESEED,0.27 REDWHEAT,0.3 -RICE,0.022 -ROBUSTA,0.5 +RICE,0.022000000000000002 +ROBUSTA,0.84 RUBBER,0.1 RUBBER-RSS,0.48 -RUR,0.00015 +RUR,4.4e-05 RUSSELL,0.11 -RUSSELL-GROWTH,0 -RUSSELL-VALUE,0 +RUSSELL-GROWTH,0.0 +RUSSELL-VALUE,0.0 RUSSELL_mini,0.094 -SARONA,0 -SEK,3E-05 +SARONA,0.0025 +SEK,2.9999999999999997e-05 SGD,0.00014 SGD_mini,0.00018 -SGX,5 +SGX,11.0 SHATZ,0.0025 SILVER,0.004 SILVER-mini,0.0125 -SING-REALESTATE,0 -SMALL75,0 +SING-REALESTATE,0.0 +SMALL75,0.0 SMI,0.77 -SMI-DIV,0 -SMI-MID,3.2 -SMIETF,0 +SMI-DIV,0.0 +SMI-MID,4.5 +SMIETF,0.0 SOFR,0.003 SOFR1,0.0025 -SONIA,0 -SONIA1,0 -SONIA3,0 +SONIA,0.0 +SONIA1,0.0 +SONIA3,0.0025 SOYBEAN,0.25 SOYBEAN_mini,0.35 SOYMEAL,0.091 SOYOIL,0.015 SP400,0.25 SP500,0.13 -SP500-GROWTH,0 -SP500-VALUE,0 +SP500-GROWTH,0.0 +SP500-VALUE,0.0 SP500_micro,0.19 -SP500_mini,0 -SP600-SMALL,0 -SPI200,1 -SS-AIA-GROUP,0 -SS-ALIBABA-HLDS,0 -SS-ASML,0 -SS-ASS-GEN,0 -SS-BAIDU,0 -SS-BANK-CHINA,0 -SS-BOC-HONG-KONG,0 -SS-BUDWEISER-BREWING,0 -SS-BYD-CO,0 -SS-CHINA-CONSTR-BK,0 -SS-CHINA-MERCH-BANK,0 -SS-CHINA-MOBILE,0 -SS-CHINA-UNICOM-HK,0 -SS-CITIC-LTD,0 -SS-CK-HUTCHISON,0 -SS-CLP-HOLDINGS,0 -SS-CNOOC,0 -SS-DTE,0 -SS-ENEL-SPA,0 -SS-HDFC,0 -SS-HEINK,0 -SS-HIND-UNILV,0 -SS-HK-CHINA-GAS,0 -SS-HK-EXCH-CLEAR,0 -SS-HOUS-DEV-FIN,0 -SS-HSBC-HLDS,0 -SS-IBERDROLA,0 -SS-IND-COMM-BK-OF-CHINA,0 -SS-IND-DIS-TEXT,0 -SS-INFOSYS,0 -SS-JD-COM,0 -SS-KOTAK-BANK,0 -SS-LOREAL,0 -SS-MEITUAN-B,0 -SS-NETEASE,0 -SS-NONGFU,0 -SS-OVERSEA-CH-BK,0 -SS-PING-AN-INSURANCE,0 -SS-POWER-ASSETS-HLDS,0 -SS-RELIANCE,0 -SS-ROCHE,0 -SS-SING-TEL,0 -SS-SUN-HUNG-KAI,0 -SS-TATA,0 -SS-TENCENT-HLDS,0 -SS-THAI-BEV,0 -SS-TOTAL-ENERGIES,0 -SS-UNICREDIT-SPA,0 -SS-VW,0 -SS-WHARF-HLDS,0 -SS-WUXI-BIOLOGICS,0 -SS-XIAOMI,0 -SS-YES-BANK,0 -SS-ZUR-INSR,0 +SP500_mini,0.0 +SP600-SMALL,0.0 +SPI200,0.57 +SS-AIA-GROUP,0.0 +SS-ALIBABA-HLDS,0.0 +SS-ASML,0.0 +SS-ASS-GEN,0.0 +SS-BAIDU,0.0 +SS-BANK-CHINA,0.0 +SS-BOC-HONG-KONG,0.0 +SS-BUDWEISER-BREWING,0.0 +SS-BYD-CO,0.0 +SS-CHINA-CONSTR-BK,0.0 +SS-CHINA-MERCH-BANK,0.0 +SS-CHINA-MOBILE,0.0 +SS-CHINA-UNICOM-HK,0.0 +SS-CITIC-LTD,0.0 +SS-CK-HUTCHISON,0.0 +SS-CLP-HOLDINGS,0.0 +SS-CNOOC,0.0 +SS-DTE,0.0 +SS-ENEL-SPA,0.0 +SS-HDFC,0.0 +SS-HEINK,0.0 +SS-HIND-UNILV,0.0 +SS-HK-CHINA-GAS,0.0 +SS-HK-EXCH-CLEAR,0.0 +SS-HOUS-DEV-FIN,0.0 +SS-HSBC-HLDS,0.0 +SS-IBERDROLA,0.0 +SS-IND-COMM-BK-OF-CHINA,0.0 +SS-IND-DIS-TEXT,0.0 +SS-INFOSYS,0.0 +SS-JD-COM,0.0 +SS-KOTAK-BANK,0.0 +SS-LOREAL,0.0 +SS-MEITUAN-B,0.0 +SS-NETEASE,0.0 +SS-NONGFU,0.0 +SS-OVERSEA-CH-BK,0.0 +SS-PING-AN-INSURANCE,0.0 +SS-POWER-ASSETS-HLDS,0.0 +SS-RELIANCE,0.0 +SS-ROCHE,0.0 +SS-SING-TEL,0.0 +SS-SUN-HUNG-KAI,0.0 +SS-TATA,0.0 +SS-TENCENT-HLDS,0.0 +SS-THAI-BEV,0.0 +SS-TOTAL-ENERGIES,0.0 +SS-UNICREDIT-SPA,0.0 +SS-VW,0.0 +SS-WHARF-HLDS,0.0 +SS-WUXI-BIOLOGICS,0.0 +SS-XIAOMI,0.0 +SS-YES-BANK,0.0 +SS-ZUR-INSR,0.0 STEEL,5.4 STERLING3,0.0025 SUGAR11,0.005 -SUGAR16,0 -SUGAR_WHITE,0 +SUGAR16,0.3 +SUGAR_WHITE,0.2 SWISSLEAD,3.3 -TECDAX,0 -TECH60_small,0 +TECDAX,0.0 +TECH60_small,0.0 THB,0.08 -TIN_LME,0 +TIN_LME,12000.0 TOPIX,0.18 -TOPIX30,0 -TOPIX_Large,0 -TSE60,0 -TSX,0 +TOPIX30,0.0 +TOPIX_Large,0.0 +TSE60,0.0 +TSX,0.0 TWD,0.0093 TWD-mini,0.00015 -UMBS-20,0 -UMBS-25,0 -UMBS-30,0 -UMBS-35,0 -UMBS-40,0 -UMBS-45,0 -UMBS-50,0 -URANIUM,0 -US-BIOTECH,0 +UMBS-20,0.0 +UMBS-25,0.0 +UMBS-30,0.0 +UMBS-35,0.0 +UMBS-40,0.0 +UMBS-45,0.0 +UMBS-50,0.0 +URANIUM,0.0 +US-BIOTECH,0.0 US-DISCRETE,0.48 US-ENERGY,0.3 US-FINANCE,0.12 US-HEALTH,0.3 US-INDUSTRY,0.3 -US-INSURE,0 +US-INSURE,0.0 US-MATERIAL,0.3 -US-OILGAS,0 +US-OILGAS,0.0 US-PROPERTY,0.094 US-REALESTATE,0.1 -US-REGBANK,0 -US-RETAIL,0 -US-SEMICONDUCTOR,0 +US-REGBANK,0.0 +US-RETAIL,0.0 +US-SEMICONDUCTOR,0.0 US-STAPLES,0.17 US-TECH,0.35 US-UTILS,0.25 US10,0.008 US10U,0.0081 -US10Y_micro,0 -US10Y_small,0 +US10Y_micro,0.0 +US10Y_small,0.0 US2,0.002 US20,0.016 -US20-new,0.072 -US2Y_micro,0 -US2Y_small,0 +US20-new,0.07200000000000001 +US2Y_micro,0.0 +US2Y_small,0.0 US3,0.0033 US30,0.016 -US30Y_micro,0 -US30Y_small,0 +US30Y_micro,0.0 +US30Y_small,0.0 US5,0.0039 -US5Y_micro,0 -USDCAD_micro,0 -USDCHF_micro,0 -USDCNH-CME,0 -USDCNH-HK,0 -USDCNH-SGX_mini,0 -USDINR,0 +US5Y_micro,0.0 +USDCAD_micro,0.0 +USDCHF_micro,0.0 +USDCNH-CME,0.0 +USDCNH-HK,0.0 +USDCNH-SGX_mini,0.0 +USDINR,0.0 USDKRW,0.11 -USDMXP,0 -USD_small,0 -USIRS10,0.027 -USIRS10ERIS,0 -USIRS10_ICE,0 -USIRS12ERIS,0 -USIRS15ERIS,0 -USIRS2,0 -USIRS20ERIS,0 -USIRS2ERIS,0.007 -USIRS2_ICE,0 +USDMXP,0.0 +USD_small,0.0 +USIRS10,0.027000000000000003 +USIRS10ERIS,0.0 +USIRS10_ICE,0.0 +USIRS12ERIS,0.0 +USIRS15ERIS,0.0 +USIRS2,0.0 +USIRS20ERIS,0.0 +USIRS2ERIS,0.006999999999999999 +USIRS2_ICE,0.0 USIRS30,0.09375 -USIRS3ERIS,0 -USIRS4ERIS,0 +USIRS3ERIS,0.0 +USIRS4ERIS,0.0 USIRS5,0.017 USIRS5ERIS,0.012 -USIRS5_ICE,0 -USIRS7ERIS,0 +USIRS5_ICE,0.0 +USIRS7ERIS,0.0 V2X,0.026 -VHANG,0 +VHANG,0.0 VIX,0.025 VIX_mini,0.032 -VNKI,0.44 -VOLQ,0 -WATER-CALI,0 +VNKI,0.24 +VOLQ,0.0 +WATER-CALI,0.0 WHEAT,0.37 -WHEAT-ASX,0 -WHEAT_ICE,0 -WHEAT_mini,0.5 +WHEAT-ASX,0.0 +WHEAT_ICE,0.7 +WHEAT_mini,0.28 WHEY,0.59 -WTI_small,0 -YENEUR,0.011 -YENEUR-ICE,0 -ZAR,9.5E-05 -ZINC_LME,0 +WTI_small,0.0 +YENEUR,0.011000000000000001 +YENEUR-ICE,0.0 +ZAR,3.6e-05 +ZINC_LME,1.8 From eefc4e0f2f71c342d0032aec65ef58104bfcbc08 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:46:53 +0000 Subject: [PATCH 142/235] removed annoying warning from volumes --- sysproduction/data/volumes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/data/volumes.py b/sysproduction/data/volumes.py index 3a28dc41c2..de6a3530d7 100644 --- a/sysproduction/data/volumes.py +++ b/sysproduction/data/volumes.py @@ -111,6 +111,6 @@ def get_smoothed_volume_ignoring_old_data( return 0.0 smoothed_recent_volumes = recent_volumes.ewm(span=span).mean() - final_volume = smoothed_recent_volumes[-1] + final_volume = smoothed_recent_volumes.iloc[-1] return final_volume From 6b655488b001746cac0ff9a45fb28eb78d0d5299 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:47:58 +0000 Subject: [PATCH 143/235] removed annoying warning from contract positions --- sysdata/production/historic_contract_positions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/production/historic_contract_positions.py b/sysdata/production/historic_contract_positions.py index 2315e9ea8e..7df0c3a795 100644 --- a/sysdata/production/historic_contract_positions.py +++ b/sysdata/production/historic_contract_positions.py @@ -306,7 +306,7 @@ def _infer_position_at_start( if no_positions_before_start: position_at_start = 0 else: - last_position_before_start = positions_before_start[-1] + last_position_before_start = positions_before_start.iloc[-1] position_at_start = last_position_before_start return position_at_start From c5091232ca86c9c9fa3ae650ef4a67ee1d31d4d7 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:51:34 +0000 Subject: [PATCH 144/235] fix append error in trade reporting --- sysproduction/reporting/data/trades.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/reporting/data/trades.py b/sysproduction/reporting/data/trades.py index ab880e5409..3e0f70188e 100644 --- a/sysproduction/reporting/data/trades.py +++ b/sysproduction/reporting/data/trades.py @@ -401,7 +401,7 @@ def delay_row(order_row): "fill_datetime", ] ] - new_order_row = new_order_row.append( + new_order_row = new_order_row._append( pd.Series( [submit_minus_generated, filled_minus_submit], index=["submit_minus_generated", "filled_minus_submit"], From dbf8f8de5025b6d7f2d3cbe43546e1857f646492 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:52:30 +0000 Subject: [PATCH 145/235] fix append error in pandl reporting --- sysproduction/reporting/data/pandl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/reporting/data/pandl.py b/sysproduction/reporting/data/pandl.py index 17e77d6f93..08bc9d8157 100644 --- a/sysproduction/reporting/data/pandl.py +++ b/sysproduction/reporting/data/pandl.py @@ -80,7 +80,7 @@ def get_strategy_pandl_and_residual(self): total_pandl = get_total_capital_pandl(self.data, self.start_date, self.end_date) residual_pandl = total_pandl - total_pandl_strategies residual_dfrow = pd.DataFrame(dict(codes=["residual"], pandl=residual_pandl)) - strategies_pandl = strategies_pandl.append(residual_dfrow) + strategies_pandl = strategies_pandl._append(residual_dfrow) strategies_pandl.pandl = strategies_pandl.pandl return strategies_pandl From 075724ddb983328f58783139de755f37ad877d8d Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:54:37 +0000 Subject: [PATCH 146/235] fix append error in trade reporting --- sysproduction/reporting/data/trades.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/reporting/data/trades.py b/sysproduction/reporting/data/trades.py index 3e0f70188e..4acca016d0 100644 --- a/sysproduction/reporting/data/trades.py +++ b/sysproduction/reporting/data/trades.py @@ -225,7 +225,7 @@ def cash_slippage_row(slippage_row, data): "trade", ] ] - new_slippage_row = new_slippage_row.append( + new_slippage_row = new_slippage_row._append( pd.Series( [ value_of_price_point, @@ -307,7 +307,7 @@ def vol_slippage_row(slippage_row, data): "trade", ] ] - new_slippage_row = new_slippage_row.append( + new_slippage_row = new_slippage_row._append( pd.Series( [ last_annual_vol, From 9098fd6eef277a38942e74e07e9a712928a41e06 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 09:57:41 +0000 Subject: [PATCH 147/235] removed future warning from pandl calculator --- systems/accounts/pandl_calculators/pandl_using_fills.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/accounts/pandl_calculators/pandl_using_fills.py b/systems/accounts/pandl_calculators/pandl_using_fills.py index 804fd18b23..2563f1698c 100644 --- a/systems/accounts/pandl_calculators/pandl_using_fills.py +++ b/systems/accounts/pandl_calculators/pandl_using_fills.py @@ -103,7 +103,7 @@ def merge_fill_prices_with_prices( prices_to_use.columns = ["price", "fill_price"] # Where no fill price available, use price - prices_to_use = prices_to_use.fillna(axis=1, method="ffill") + prices_to_use = prices_to_use.ffill(axis=1) prices_to_use = prices_to_use.fill_price From dd5e61c0a14beaea11be51f8f5aaf85d2e7f8ed4 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 10:04:14 +0000 Subject: [PATCH 148/235] removed warning from capital data --- sysdata/production/capital.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sysdata/production/capital.py b/sysdata/production/capital.py index 2f68095538..facb8f996d 100644 --- a/sysdata/production/capital.py +++ b/sysdata/production/capital.py @@ -32,20 +32,20 @@ class capitalData(baseData): def get_current_total_capital(self) -> float: pd_series = self.get_total_capital_pd_series() - return float(pd_series[-1]) + return float(pd_series.iloc[-1]) def get_current_broker_account_value(self) -> float: pd_series = self.get_broker_account_value_pd_series() - return float(pd_series[-1]) + return float(pd_series.iloc[-1]) def get_current_maximum_capital_value(self) -> float: pd_series = self.get_maximum_account_value_pd_series() - return float(pd_series[-1]) + return float(pd_series.iloc[-1]) def get_current_pandl_account(self) -> float: pd_series = self.get_profit_and_loss_account_pd_series() - return float(pd_series[-1]) + return float(pd_series.iloc[-1]) def get_total_capital_pd_series(self) -> pd.Series: all_capital_series = self.get_df_of_all_global_capital() From 23a1be0a546fdb38fea12a7bd4440f20425301f6 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 10:13:48 +0000 Subject: [PATCH 149/235] more pandas fixes --- sysproduction/data/risk.py | 2 +- sysproduction/reporting/data/pricechanges.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/data/risk.py b/sysproduction/data/risk.py index 3f29e86dba..759ba3bd61 100644 --- a/sysproduction/data/risk.py +++ b/sysproduction/data/risk.py @@ -160,7 +160,7 @@ def get_current_ann_stdev_of_prices(data, instrument_code): try: current_stdev_ann_price_units = get_ann_ts_stdev_of_prices( data=data, instrument_code=instrument_code - )[-1] + ).iloc[-1] except: ## can happen for brand new instruments not properly loaded return np.nan diff --git a/sysproduction/reporting/data/pricechanges.py b/sysproduction/reporting/data/pricechanges.py index e37dd258e6..ed27a19208 100644 --- a/sysproduction/reporting/data/pricechanges.py +++ b/sysproduction/reporting/data/pricechanges.py @@ -220,4 +220,4 @@ def get_stdev_at_start_date_for_instrument( daily_returns = daily_price_series.diff() vol_series = daily_returns.ewm(30).std() - return vol_series[-1] + return vol_series.iloc[-1] From 93b61ee763216ee553574b60a5bd4b52361c4100 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 10:24:44 +0000 Subject: [PATCH 150/235] remove warning in price changes --- sysproduction/reporting/data/pricechanges.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/reporting/data/pricechanges.py b/sysproduction/reporting/data/pricechanges.py index ed27a19208..422733fdf6 100644 --- a/sysproduction/reporting/data/pricechanges.py +++ b/sysproduction/reporting/data/pricechanges.py @@ -200,7 +200,7 @@ def get_price_change_from_series_for_period( price_series_for_period = price_series[start_date:end_date] if len(price_series_for_period) == 0: return np.nan - return price_series_for_period[-1] - price_series_for_period[0] + return price_series_for_period.iloc[-1] - price_series_for_period.iloc[0] def get_percentage_change_from_series_for_period( @@ -209,7 +209,7 @@ def get_percentage_change_from_series_for_period( price_series_for_period = price_series[start_date:end_date] if len(price_series_for_period) == 0: return np.nan - return 100 * ((price_series_for_period[-1] / price_series_for_period[0]) - 1) + return 100 * ((price_series_for_period.iloc[-1] / price_series_for_period.iloc[0]) - 1) def get_stdev_at_start_date_for_instrument( From 738f3b1c616f3870d387283320ae2a2b8518907a Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 22 Nov 2023 11:03:15 +0000 Subject: [PATCH 151/235] updated version --- CHANGELOG.md | 8 ++++++++ README.md | 4 ++-- pyproject.toml | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e765173278..93d176bf92 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Release notes +## Version 1.80 + +- *NO LONGER REQUIRES ARCTIC* Time series data is stored in parquet, install pyarrow +- But does require newer versions of pandas and python, see requirements.txt +- See the discussion [here](https://github.com/robcarver17/pysystemtrade/discussions/1290) to see how to switch from arctic to parquet +- added long only constraint to dynamic optimisation +- various bugs fixed + ## Version 1.71 - Further progress in replacing old logging with python logging diff --git a/README.md b/README.md index 26a0455841..65e1cf4815 100644 --- a/README.md +++ b/README.md @@ -7,10 +7,10 @@ Rob Carver [https://qoppac.blogspot.com/p/pysystemtrade.html](https://qoppac.blogspot.com/p/pysystemtrade.html) -Version 1.71 +Version 1.80 -2023-07-19 +2023-11-22 diff --git a/pyproject.toml b/pyproject.toml index 937e6ddb48..7b41bd4152 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "pysystemtrader" -version = "1.61" +version = "1.80" authors = [ { name="Robert Carver", email="rob@systematicmoney.org" }, ] From 5b6b2a092aa7255fd594a429cead3955201068ed Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Thu, 23 Nov 2023 09:17:13 +0000 Subject: [PATCH 152/235] bug reading spread data --- syscore/pandas/pdutils.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 6f1575a8f9..5e5491285b 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -11,7 +11,7 @@ from syscore.constants import named_object, arg_not_supplied DEFAULT_DATE_FORMAT_FOR_CSV = "%Y-%m-%d %H:%M:%S" - +EXPECTED_LENGTH_OF_DATE = 19 def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 @@ -105,9 +105,7 @@ def pd_readcsv( df = pd.read_csv(filename, skiprows=skiprows, skipfooter=skipfooter) ## Add time index as index - df.index = pd.to_datetime(df[date_index_name], format=date_format).values - del df[date_index_name] - df.index.name = None + df = add_datetime_index(df=df, date_index_name=date_index_name, date_format=date_format) if input_column_mapping is not arg_not_supplied: df = remap_columns_in_pd(df, input_column_mapping) @@ -115,6 +113,18 @@ def pd_readcsv( return df +def add_datetime_index(df: pd.DataFrame, date_index_name: str, date_format: str = DEFAULT_DATE_FORMAT_FOR_CSV, expected_length_of_date: int = EXPECTED_LENGTH_OF_DATE) -> pd.DataFrame: + date_index = df[date_index_name] + date_index = date_index.astype(str) + def left(x:str, n): + return x[:n] + date_index = date_index.apply(left, n=EXPECTED_LENGTH_OF_DATE) + df.index = pd.to_datetime(date_index, format=date_format).values + del df[date_index_name] + df.index.name = None + + return df + def remap_columns_in_pd(df: pd.DataFrame, input_column_mapping: dict) -> pd.DataFrame: """ Returns the bool for columns of slice_data for which we have at least one non nan value From 198023a8289875b83ab353353e22fecde67a76c4 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 27 Nov 2023 09:17:06 +0000 Subject: [PATCH 153/235] arctic was still in setup and requirements --- requirements.txt | 3 +-- setup.py | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3c81597598..3572227e93 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,6 @@ pyyaml==5.3.1 numpy>=1.24.0 scipy>=1.0.0 pymongo==3.11.3 -arctic==1.79.2 ib-insync==0.9.86 psutil==5.6.6 pytest>6.2 @@ -12,4 +11,4 @@ Flask>=2.0.1 Werkzeug>=2.0.1 statsmodels==0.14.0 PyPDF2>=2.5.0 -pyarrow>=14.0.1 \ No newline at end of file +pyarrow>=14.0.1 diff --git a/setup.py b/setup.py index 4b773656cc..1a1836b9f8 100755 --- a/setup.py +++ b/setup.py @@ -91,7 +91,6 @@ def dir_this_file(): "numpy>=1.24.0", "scipy>=1.0.0", "pymongo==3.11.3", - "arctic==1.79.2", "psutil==5.6.6", "pytest>6.2", "Flask>=2.0.1", From cbea9610bfaaeee20df4af5485a1dcd90c6d03c5 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 27 Nov 2023 09:26:00 +0000 Subject: [PATCH 154/235] blacked --- syscore/pandas/pdutils.py | 19 +++++++++++++++---- .../backup_parquet_data_to_remote.py | 8 ++------ sysproduction/data/directories.py | 2 +- sysproduction/reporting/data/pricechanges.py | 4 +++- sysproduction/run_backups.py | 2 +- 5 files changed, 22 insertions(+), 13 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 5e5491285b..582bc112ca 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -11,7 +11,8 @@ from syscore.constants import named_object, arg_not_supplied DEFAULT_DATE_FORMAT_FOR_CSV = "%Y-%m-%d %H:%M:%S" -EXPECTED_LENGTH_OF_DATE = 19 +EXPECTED_LENGTH_OF_DATE = 19 + def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 @@ -105,7 +106,9 @@ def pd_readcsv( df = pd.read_csv(filename, skiprows=skiprows, skipfooter=skipfooter) ## Add time index as index - df = add_datetime_index(df=df, date_index_name=date_index_name, date_format=date_format) + df = add_datetime_index( + df=df, date_index_name=date_index_name, date_format=date_format + ) if input_column_mapping is not arg_not_supplied: df = remap_columns_in_pd(df, input_column_mapping) @@ -113,11 +116,18 @@ def pd_readcsv( return df -def add_datetime_index(df: pd.DataFrame, date_index_name: str, date_format: str = DEFAULT_DATE_FORMAT_FOR_CSV, expected_length_of_date: int = EXPECTED_LENGTH_OF_DATE) -> pd.DataFrame: +def add_datetime_index( + df: pd.DataFrame, + date_index_name: str, + date_format: str = DEFAULT_DATE_FORMAT_FOR_CSV, + expected_length_of_date: int = EXPECTED_LENGTH_OF_DATE, +) -> pd.DataFrame: date_index = df[date_index_name] date_index = date_index.astype(str) - def left(x:str, n): + + def left(x: str, n): return x[:n] + date_index = date_index.apply(left, n=EXPECTED_LENGTH_OF_DATE) df.index = pd.to_datetime(date_index, format=date_format).values del df[date_index_name] @@ -125,6 +135,7 @@ def left(x:str, n): return df + def remap_columns_in_pd(df: pd.DataFrame, input_column_mapping: dict) -> pd.DataFrame: """ Returns the bool for columns of slice_data for which we have at least one non nan value diff --git a/sysproduction/backup_parquet_data_to_remote.py b/sysproduction/backup_parquet_data_to_remote.py index 1018d1ce6d..29d482717c 100644 --- a/sysproduction/backup_parquet_data_to_remote.py +++ b/sysproduction/backup_parquet_data_to_remote.py @@ -1,11 +1,7 @@ import os from sysdata.config.production_config import get_production_config -from sysproduction.data.directories import ( - - get_parquet_backup_directory - -) +from sysproduction.data.directories import get_parquet_backup_directory from sysdata.data_blob import dataBlob @@ -18,7 +14,6 @@ def backup_parquet_data_to_remote(): return None - def get_parquet_directory(data): return data.parquet_root_directory @@ -33,6 +28,7 @@ def backup_parquet(self): log.debug("Copying data to backup destination") backup_parquet_data_to_remote_with_data(data) + def backup_parquet_data_to_remote_with_data(data): source_path = get_parquet_directory(data) destination_path = get_parquet_backup_directory() diff --git a/sysproduction/data/directories.py b/sysproduction/data/directories.py index 07907eef5c..6f82add5d5 100644 --- a/sysproduction/data/directories.py +++ b/sysproduction/data/directories.py @@ -7,7 +7,6 @@ production_config = get_production_config() - def get_main_backup_directory(): ans = production_config.get_element("offsystem_backup_directory") return get_resolved_pathname(ans) @@ -19,6 +18,7 @@ def get_csv_backup_directory(): return ans + def get_parquet_backup_directory(): main_backup = get_main_backup_directory() ans = os.path.join(main_backup, "parquet") diff --git a/sysproduction/reporting/data/pricechanges.py b/sysproduction/reporting/data/pricechanges.py index 422733fdf6..01d4cb1f28 100644 --- a/sysproduction/reporting/data/pricechanges.py +++ b/sysproduction/reporting/data/pricechanges.py @@ -209,7 +209,9 @@ def get_percentage_change_from_series_for_period( price_series_for_period = price_series[start_date:end_date] if len(price_series_for_period) == 0: return np.nan - return 100 * ((price_series_for_period.iloc[-1] / price_series_for_period.iloc[0]) - 1) + return 100 * ( + (price_series_for_period.iloc[-1] / price_series_for_period.iloc[0]) - 1 + ) def get_stdev_at_start_date_for_instrument( diff --git a/sysproduction/run_backups.py b/sysproduction/run_backups.py index 7b53cd0a8f..224102daec 100644 --- a/sysproduction/run_backups.py +++ b/sysproduction/run_backups.py @@ -29,7 +29,7 @@ def get_list_of_timer_functions_for_backup(): ("backup_db_to_csv", db_backup_object), ("backup_mongo_data_as_dump", mongodump_backup_object), ("backup_files", statefile_backup_object), - ("backup_parquet", parquet_backup_object) + ("backup_parquet", parquet_backup_object), ] return list_of_timer_names_and_functions From d1f01e9648fd697ae160a2c4df7c9a8d36f43e5c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 12:20:32 +0000 Subject: [PATCH 155/235] update Black version to 23.11.0 --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 35e9c59225..cff5da7040 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,4 +9,4 @@ jobs: - uses: actions/checkout@v3 - uses: psf/black@stable with: - version: "22.12.0" + version: "23.11.0" From 1e41214a6957bc8fdf74a16b3db5473e1fe9ca7d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 12:25:59 +0000 Subject: [PATCH 156/235] quick test runs python 3.10 --- .github/workflows/quick-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/quick-test.yml b/.github/workflows/quick-test.yml index 176c90a792..fea61a6acb 100644 --- a/.github/workflows/quick-test.yml +++ b/.github/workflows/quick-test.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python-version: [ 3.8 ] + python-version: [ 3.10 ] steps: From 9f319ccf555fc11098f35cf6dc36d447f2458f8e Mon Sep 17 00:00:00 2001 From: todd <3578666+tgibson11@users.noreply.github.com> Date: Mon, 27 Nov 2023 07:01:52 -0700 Subject: [PATCH 157/235] HANG-TECH --> HANGTECH in spread costs (cherry picked from commit a372c5c3ad9ceb2a2e33bc99cee30b0950f57f68) --- data/futures/csvconfig/spreadcosts.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/futures/csvconfig/spreadcosts.csv b/data/futures/csvconfig/spreadcosts.csv index 9e9c4d7a11..cdb269329d 100644 --- a/data/futures/csvconfig/spreadcosts.csv +++ b/data/futures/csvconfig/spreadcosts.csv @@ -236,7 +236,7 @@ GOLD-mini,0.24 GOLD_micro,0.08800000000000001 HANG,1.5 HANG-DIV,0.0 -HANG-TECH,0.0 +HANGTECH,0.0 HANGENT,1.0 HANGENT-GTR,0.0 HANGENT-NTR,0.0 From 7ed816c4dd06fbeea21fdee1ee88d898144d79c1 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 12:27:01 +0000 Subject: [PATCH 158/235] slow test runs python 3.10 --- .github/workflows/slow-test-develop.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/slow-test-develop.yml b/.github/workflows/slow-test-develop.yml index bf50b9c783..9d1ea46dd3 100644 --- a/.github/workflows/slow-test-develop.yml +++ b/.github/workflows/slow-test-develop.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python-version: [ 3.8, 3.7 ] + python-version: [ 3.10.13 ] steps: From 9c3523df0ce1b8a72fcd59dbaf279758089df86e Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 12:30:05 +0000 Subject: [PATCH 159/235] python 3.10.13 --- .github/workflows/quick-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/quick-test.yml b/.github/workflows/quick-test.yml index fea61a6acb..3fc83381d2 100644 --- a/.github/workflows/quick-test.yml +++ b/.github/workflows/quick-test.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python-version: [ 3.10 ] + python-version: [ 3.10.13 ] steps: From 1a0b6e8beaebd54e4c2520bce2921b4ccdda7baa Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 13:22:26 +0000 Subject: [PATCH 160/235] add a default parquet store to config --- sysdata/config/defaults.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sysdata/config/defaults.yaml b/sysdata/config/defaults.yaml index 9e24a8d990..7f9bafba01 100644 --- a/sysdata/config/defaults.yaml +++ b/sysdata/config/defaults.yaml @@ -43,6 +43,9 @@ mongo_db: 'production' # format for mongo_host, eg mongodb://127.0.0.1:27018 mongo_port: 27017 # +# Parquet store +parquet_store: '/home/me/data/parquet' +# # Needs to be consistent with what you are using in crontab echo_extension: '.txt' # Spike checker From 289be40d82a8e0f396e513fe17114fc880e2048d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 13:22:56 +0000 Subject: [PATCH 161/235] fix date format in algo tests --- syscore/tests/test_algos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syscore/tests/test_algos.py b/syscore/tests/test_algos.py index 21924dd64d..ac39a0db5e 100644 --- a/syscore/tests/test_algos.py +++ b/syscore/tests/test_algos.py @@ -16,7 +16,7 @@ def get_data(path): """ returns: DataFrame or Series if 1 col """ - df = pd_readcsv(resolve_path_and_filename_for_package(path)) + df = pd_readcsv(resolve_path_and_filename_for_package(path), date_format="%Y-%m-%d") if len(df.columns) == 1: return df[df.columns[0]] return df From 94f030226a650385ef8d53e380c6986ab6561253 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 13:23:14 +0000 Subject: [PATCH 162/235] fix date format in roll config tests --- sysinit/futures/tests/test_sysinit_futures.py | 2 +- tests/test_examples.py | 20 +++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/sysinit/futures/tests/test_sysinit_futures.py b/sysinit/futures/tests/test_sysinit_futures.py index 13f8e7640f..600abbabf9 100644 --- a/sysinit/futures/tests/test_sysinit_futures.py +++ b/sysinit/futures/tests/test_sysinit_futures.py @@ -13,7 +13,7 @@ class TestFuturesInit: input_date_index_name="Time", input_skiprows=0, input_skipfooter=0, - input_date_format="%Y-%m-%dT%H:%M:%S%z", + input_date_format="%Y-%m-%dT%H:%M:%S", input_column_mapping=dict( OPEN="Open", HIGH="High", LOW="Low", FINAL="Close", VOLUME="Volume" ), diff --git a/tests/test_examples.py b/tests/test_examples.py index 6ab6d08746..47801d9be7 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -131,7 +131,7 @@ def test_simple_system_trading_rules_estimated( my_system = System([fcs, my_rules, raw_data], data, my_config) my_config.forecast_scalar_estimate["pool_instruments"] = False print( - my_system.forecastScaleCap.get_forecast_scalar("EDOLLAR", "ewmac32").tail(5) + my_system.forecastScaleCap.get_forecast_scalar("SOFR", "ewmac32").tail(5) ) def test_simple_system_trading_rules_fixed(self, data, my_rules, fcs): @@ -144,7 +144,7 @@ def test_simple_system_trading_rules_fixed(self, data, my_rules, fcs): my_system = System([fcs, my_rules], data, my_config) print( - my_system.forecastScaleCap.get_capped_forecast("EDOLLAR", "ewmac32").tail(5) + my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5) ) def test_simple_system_combing_rules( @@ -156,7 +156,7 @@ def test_simple_system_combing_rules( print(my_system.combForecast.get_forecast_weights("EDOLLAR").tail(5)) print( my_system.combForecast.get_forecast_diversification_multiplier( - "EDOLLAR" + "SOFR" ).tail(5) ) @@ -212,7 +212,7 @@ def test_simple_system_position_sizing( print(my_system.positionSize.get_instrument_value_vol("EDOLLAR").tail(5)) print( my_system.positionSize.get_average_position_at_subsystem_level( - "EDOLLAR" + "SOFR" ).tail(5) ) print(my_system.positionSize.get_vol_target_dict()) @@ -382,14 +382,14 @@ def test_simple_system_config_import(self, data): data, my_config, ) - print(my_system.rules.get_raw_forecast("EDOLLAR", "ewmac32").tail(5)) - print(my_system.rules.get_raw_forecast("EDOLLAR", "ewmac8").tail(5)) + print(my_system.rules.get_raw_forecast("SOFR", "ewmac32").tail(5)) + print(my_system.rules.get_raw_forecast("SOFR", "ewmac8").tail(5)) print( - my_system.forecastScaleCap.get_capped_forecast("EDOLLAR", "ewmac32").tail(5) + my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5) ) - print(my_system.forecastScaleCap.get_forecast_scalar("EDOLLAR", "ewmac32")) - print(my_system.combForecast.get_combined_forecast("EDOLLAR").tail(5)) - print(my_system.combForecast.get_forecast_weights("EDOLLAR").tail(5)) + print(my_system.forecastScaleCap.get_forecast_scalar("SOFR", "ewmac32")) + print(my_system.combForecast.get_combined_forecast("SOFR").tail(5)) + print(my_system.combForecast.get_forecast_weights("SOFR").tail(5)) print(my_system.positionSize.get_subsystem_position("EDOLLAR").tail(5)) From 3403882795b16972840bdba220dfb5f9610069db Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Mon, 27 Nov 2023 13:26:22 +0000 Subject: [PATCH 163/235] EDOLLAR -> SOFR --- tests/test_examples.py | 72 +++++++++++++++++++----------------------- 1 file changed, 33 insertions(+), 39 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index 47801d9be7..cef949fb9b 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -49,7 +49,7 @@ def my_rules(ewmac_8, ewmac_32): def my_config(ewmac_8, ewmac_32): my_config = Config() my_config.trading_rules = dict(ewmac8=ewmac_8, ewmac32=ewmac_32) - my_config.instruments = ["US10", "EDOLLAR", "CORN", "SP500"] + my_config.instruments = ["US10", "SOFR", "CORN", "SP500"] my_config.notional_trading_capital = 1000000 my_config.exclude_instrument_lists = dict( ignore_instruments=["MILK"], @@ -100,7 +100,7 @@ def test_simple_system_rules(self, data, raw_data): my_system = System([my_rules, raw_data], data) print(my_system) - print(my_system.rules.get_raw_forecast("EDOLLAR", "ewmac").tail(5)) + print(my_system.rules.get_raw_forecast("SOFR", "ewmac").tail(5)) def test_simple_system_trading_rule(self, data, raw_data, ewmac_8, ewmac_32): ewmac_rule = TradingRule(ewmac) @@ -110,7 +110,7 @@ def test_simple_system_trading_rule(self, data, raw_data, ewmac_8, ewmac_32): print(my_rules.trading_rules()["ewmac32"]) my_system = System([my_rules, raw_data], data) - my_system.rules.get_raw_forecast("EDOLLAR", "ewmac32").tail(5) + my_system.rules.get_raw_forecast("SOFR", "ewmac32").tail(5) def test_simple_system_trading_rules_estimated( self, data, raw_data, ewmac_8, ewmac_32, fcs @@ -122,30 +122,26 @@ def test_simple_system_trading_rules_estimated( empty_rules = Rules() my_config.trading_rules = dict(ewmac8=ewmac_8, ewmac32=ewmac_32) my_system = System([empty_rules, raw_data], data, my_config) - my_system.rules.get_raw_forecast("EDOLLAR", "ewmac32").tail(5) + my_system.rules.get_raw_forecast("SOFR", "ewmac32").tail(5) # we can estimate these ourselves - my_config.instruments = ["US10", "EDOLLAR", "CORN", "SP500"] + my_config.instruments = ["US10", "SOFR", "CORN", "SP500"] my_config.use_forecast_scale_estimates = True my_system = System([fcs, my_rules, raw_data], data, my_config) my_config.forecast_scalar_estimate["pool_instruments"] = False - print( - my_system.forecastScaleCap.get_forecast_scalar("SOFR", "ewmac32").tail(5) - ) + print(my_system.forecastScaleCap.get_forecast_scalar("SOFR", "ewmac32").tail(5)) def test_simple_system_trading_rules_fixed(self, data, my_rules, fcs): # or we can use the values from the book my_config = Config() my_config.trading_rules = dict(ewmac8=ewmac_8, ewmac32=ewmac_32) - my_config.instruments = ["US10", "EDOLLAR", "CORN", "SP500"] + my_config.instruments = ["US10", "SOFR", "CORN", "SP500"] my_config.forecast_scalars = dict(ewmac8=5.3, ewmac32=2.65) my_config.use_forecast_scale_estimates = False my_system = System([fcs, my_rules], data, my_config) - print( - my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5) - ) + print(my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5)) def test_simple_system_combing_rules( self, data, raw_data, my_rules, my_config, fcs @@ -153,11 +149,11 @@ def test_simple_system_combing_rules( # defaults combiner = ForecastCombine() my_system = System([fcs, my_rules, combiner, raw_data], data, my_config) - print(my_system.combForecast.get_forecast_weights("EDOLLAR").tail(5)) + print(my_system.combForecast.get_forecast_weights("SOFR").tail(5)) print( - my_system.combForecast.get_forecast_diversification_multiplier( - "SOFR" - ).tail(5) + my_system.combForecast.get_forecast_diversification_multiplier("SOFR").tail( + 5 + ) ) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' @@ -192,7 +188,7 @@ def test_simple_system_combining_fixed(self, data, raw_data, my_config, fcs): my_system = System( [fcs, empty_rules, combiner, raw_data], data, my_config ) # no need for accounts if no estimation done - my_system.combForecast.get_combined_forecast("EDOLLAR").tail(5) + my_system.combForecast.get_combined_forecast("SOFR").tail(5) def test_simple_system_position_sizing( self, data, raw_data, my_rules, my_config, fcs, combiner, possizer @@ -206,17 +202,17 @@ def test_simple_system_position_sizing( [fcs, my_rules, combiner, possizer, raw_data], data, my_config ) - print(my_system.positionSize.get_price_volatility("EDOLLAR").tail(5)) - print(my_system.positionSize.get_block_value("EDOLLAR").tail(5)) - print(my_system.positionSize.get_underlying_price("EDOLLAR")) - print(my_system.positionSize.get_instrument_value_vol("EDOLLAR").tail(5)) + print(my_system.positionSize.get_price_volatility("SOFR").tail(5)) + print(my_system.positionSize.get_block_value("SOFR").tail(5)) + print(my_system.positionSize.get_underlying_price("SOFR")) + print(my_system.positionSize.get_instrument_value_vol("SOFR").tail(5)) print( - my_system.positionSize.get_average_position_at_subsystem_level( - "SOFR" - ).tail(5) + my_system.positionSize.get_average_position_at_subsystem_level("SOFR").tail( + 5 + ) ) print(my_system.positionSize.get_vol_target_dict()) - print(my_system.positionSize.get_subsystem_position("EDOLLAR").tail(5)) + print(my_system.positionSize.get_subsystem_position("SOFR").tail(5)) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_simple_system_portfolio_estimated( @@ -246,7 +242,7 @@ def test_simple_system_portfolio_fixed( # or fixed my_config.use_instrument_weight_estimates = False my_config.use_instrument_div_mult_estimates = False - my_config.instrument_weights = dict(US10=0.1, EDOLLAR=0.4, CORN=0.3, SP500=0.2) + my_config.instrument_weights = dict(US10=0.1, SOFR=0.4, CORN=0.3, SP500=0.2) my_config.instrument_div_multiplier = 1.5 my_config.forecast_weights = dict(ewmac8=0.5, ewmac32=0.5) my_config.use_forecast_weight_estimates = False @@ -255,7 +251,7 @@ def test_simple_system_portfolio_fixed( [fcs, my_rules, combiner, possizer, portfolio, raw_data], data, my_config ) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) def test_simple_system_costs( self, @@ -270,7 +266,7 @@ def test_simple_system_costs( account, ): my_config.forecast_weights = dict(ewmac8=0.5, ewmac32=0.5) - my_config.instrument_weights = dict(US10=0.1, EDOLLAR=0.4, CORN=0.3, SP500=0.2) + my_config.instrument_weights = dict(US10=0.1, SOFR=0.4, CORN=0.3, SP500=0.2) my_system = System( [fcs, my_rules, combiner, possizer, portfolio, account, raw_data], @@ -288,7 +284,7 @@ def test_simple_system_config_object(self, data, ewmac_8, ewmac_32): my_config = Config( dict( trading_rules=dict(ewmac8=ewmac_8, ewmac32=ewmac_32), - instrument_weights=dict(US10=0.1, EDOLLAR=0.4, CORN=0.3, SP500=0.2), + instrument_weights=dict(US10=0.1, SOFR=0.4, CORN=0.3, SP500=0.2), instrument_div_multiplier=1.5, forecast_scalars=dict(ewmac8=5.3, ewmac32=2.65), forecast_weights=dict(ewmac8=0.5, ewmac32=0.5), @@ -317,14 +313,14 @@ def test_simple_system_config_object(self, data, ewmac_8, ewmac_32): data, my_config, ) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_simple_system_risk_overlay(self, data, ewmac_8, ewmac_32): my_config = Config( dict( trading_rules=dict(ewmac8=ewmac_8, ewmac32=ewmac_32), - instrument_weights=dict(US10=0.1, EDOLLAR=0.4, CORN=0.3, SP500=0.2), + instrument_weights=dict(US10=0.1, SOFR=0.4, CORN=0.3, SP500=0.2), instrument_div_multiplier=1.5, forecast_scalars=dict(ewmac8=5.3, ewmac32=2.65), forecast_weights=dict(ewmac8=0.5, ewmac32=0.5), @@ -359,7 +355,7 @@ def test_simple_system_risk_overlay(self, data, ewmac_8, ewmac_32): data, my_config, ) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) def test_simple_system_config_import(self, data): my_config = Config("systems.provided.example.simplesystemconfig.yaml") @@ -384,16 +380,14 @@ def test_simple_system_config_import(self, data): ) print(my_system.rules.get_raw_forecast("SOFR", "ewmac32").tail(5)) print(my_system.rules.get_raw_forecast("SOFR", "ewmac8").tail(5)) - print( - my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5) - ) + print(my_system.forecastScaleCap.get_capped_forecast("SOFR", "ewmac32").tail(5)) print(my_system.forecastScaleCap.get_forecast_scalar("SOFR", "ewmac32")) print(my_system.combForecast.get_combined_forecast("SOFR").tail(5)) print(my_system.combForecast.get_forecast_weights("SOFR").tail(5)) - print(my_system.positionSize.get_subsystem_position("EDOLLAR").tail(5)) + print(my_system.positionSize.get_subsystem_position("SOFR").tail(5)) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_prebaked_simple_system(self): @@ -402,7 +396,7 @@ def test_prebaked_simple_system(self): """ my_system = simplesystem() print(my_system) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_prebaked_from_confg(self): @@ -412,7 +406,7 @@ def test_prebaked_from_confg(self): my_config = Config("systems.provided.example.simplesystemconfig.yaml") my_data = csvFuturesSimData() my_system = simplesystem(config=my_config, data=my_data) - print(my_system.portfolio.get_notional_position("EDOLLAR").tail(5)) + print(my_system.portfolio.get_notional_position("SOFR").tail(5)) @pytest.mark.slow # will be skipped unless run with 'pytest --runslow' def test_prebaked_chapter15(self): From effcd72e408b82c67e16ba8a5fa8941ad9c4a961 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 28 Nov 2023 13:22:55 +0000 Subject: [PATCH 164/235] undo monkeypatch work --- systems/tests/test_position_sizing.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/systems/tests/test_position_sizing.py b/systems/tests/test_position_sizing.py index b0676dc816..b81025b357 100644 --- a/systems/tests/test_position_sizing.py +++ b/systems/tests/test_position_sizing.py @@ -29,6 +29,9 @@ def setUp(self): self.data = data self.position_sizing = PositionSizing + def tearDown(self) -> None: + self.monkeypatch.undo() + @unittest.SkipTest def test_get_combined_forecast(self): self.assertAlmostEqual( @@ -60,8 +63,9 @@ def test_get_instrument_sizing_data(self): self.assertEqual(ans[1], 2500) def test_get_daily_cash_vol_target(self): - envs = {PRIVATE_CONFIG_DIR_ENV_VAR: "sysdata.tests.custom_private_config"} - self.monkeypatch.setattr(os, "environ", envs) + self.monkeypatch.setenv( + PRIVATE_CONFIG_DIR_ENV_VAR, "sysdata.tests.custom_private_config" + ) ans_dict = self.system.positionSize.get_vol_target_dict() self.assertEqual(ans_dict["base_currency"], "GBP") From 21fb1ab607d2bb4b54277ca89602e32d04facacf Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 28 Nov 2023 15:12:40 +0000 Subject: [PATCH 165/235] skip weird 'ufunc' failing test --- sysdata/tests/test_config.py | 17 ++++++++--------- tests/test_examples.py | 1 + 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/sysdata/tests/test_config.py b/sysdata/tests/test_config.py index d7ffbf9afa..2035371392 100644 --- a/sysdata/tests/test_config.py +++ b/sysdata/tests/test_config.py @@ -1,7 +1,6 @@ from sysdata.config.configdata import Config from sysdata.config.control_config import get_control_config from sysdata.config.private_directory import PRIVATE_CONFIG_DIR_ENV_VAR -import os class TestConfig: @@ -11,16 +10,16 @@ def test_default(self): assert config.get_element("ib_idoffset") == 100 def test_custom_dir(self, monkeypatch): - envs = {PRIVATE_CONFIG_DIR_ENV_VAR: "sysdata.tests.custom_private_config"} - monkeypatch.setattr(os, "environ", envs) + monkeypatch.setenv( + PRIVATE_CONFIG_DIR_ENV_VAR, "sysdata.tests.custom_private_config" + ) Config.reset() config = Config.default_config() assert config.get_element("ib_idoffset") == 1000 def test_bad_custom_dir(self, monkeypatch): - envs = {PRIVATE_CONFIG_DIR_ENV_VAR: "sysdata.tests"} - monkeypatch.setattr(os, "environ", envs) + monkeypatch.setenv(PRIVATE_CONFIG_DIR_ENV_VAR, "sysdata.tests") Config.reset() config = Config.default_config() @@ -34,8 +33,9 @@ def test_default_control(self): ) def test_control_custom_dir(self, monkeypatch): - envs = {PRIVATE_CONFIG_DIR_ENV_VAR: "sysdata.tests.custom_private_config"} - monkeypatch.setattr(os, "environ", envs) + monkeypatch.setenv( + PRIVATE_CONFIG_DIR_ENV_VAR, "sysdata.tests.custom_private_config" + ) config = get_control_config() assert ( @@ -44,8 +44,7 @@ def test_control_custom_dir(self, monkeypatch): ) def test_control_bad_custom_dir(self, monkeypatch): - envs = {PRIVATE_CONFIG_DIR_ENV_VAR: "sysdata.tests"} - monkeypatch.setattr(os, "environ", envs) + monkeypatch.setenv(PRIVATE_CONFIG_DIR_ENV_VAR, "sysdata.tests") config = get_control_config() assert ( diff --git a/tests/test_examples.py b/tests/test_examples.py index cef949fb9b..59ff9b8318 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -253,6 +253,7 @@ def test_simple_system_portfolio_fixed( print(my_system.portfolio.get_notional_position("SOFR").tail(5)) + @pytest.mark.skip # TODO figure out why this fails def test_simple_system_costs( self, data, From 6269c003a1e634cb598c0fa298b4e28bda1cdbe0 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 30 Nov 2023 09:46:07 +0000 Subject: [PATCH 166/235] filter out nans with pd.isnull(), and convert to numeric with pd.to_numeric() --- systems/accounts/curves/account_curve.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/accounts/curves/account_curve.py b/systems/accounts/curves/account_curve.py index 9e238958d4..af746eeb4e 100644 --- a/systems/accounts/curves/account_curve.py +++ b/systems/accounts/curves/account_curve.py @@ -277,7 +277,7 @@ def sortino(self): return sortino def vals(self): - vals = self.values[~np.isnan(self.values)] + vals = pd.to_numeric(self.values[~pd.isnull(self.values)], errors="coerce") return vals From 03f85c2a7cd773fb7a9ec85a44e185033478d07d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 30 Nov 2023 09:46:21 +0000 Subject: [PATCH 167/235] unskip test --- tests/test_examples.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index 59ff9b8318..cef949fb9b 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -253,7 +253,6 @@ def test_simple_system_portfolio_fixed( print(my_system.portfolio.get_notional_position("SOFR").tail(5)) - @pytest.mark.skip # TODO figure out why this fails def test_simple_system_costs( self, data, From 3681d76e12bedf7d6716c09db6f2236bbd230340 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Fri, 1 Dec 2023 09:15:33 +0000 Subject: [PATCH 168/235] fix #1296 --- sysquant/estimators/vol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index fdb1cb597b..fafba60b87 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -169,7 +169,7 @@ def mixed_vol_calc( vol = simple_ewvol_calc(daily_returns, days=days, min_periods=min_periods) slow_vol_days = slow_vol_years * BUSINESS_DAYS_IN_YEAR - long_vol = vol.ewm(slow_vol_days).mean() + long_vol = vol.ewm(span=slow_vol_days).mean() vol = long_vol * proportion_of_slow_vol + vol * (1 - proportion_of_slow_vol) From 977e8ba4358a4992627448972052bceb42099fac Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Mon, 4 Dec 2023 10:28:14 +0000 Subject: [PATCH 169/235] couple more new pandas issues --- syscore/pandas/pdutils.py | 15 +++++++++++---- sysdata/sim/futures_sim_data_with_data_blob.py | 6 ++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 582bc112ca..24f06f81fe 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -13,6 +13,7 @@ DEFAULT_DATE_FORMAT_FOR_CSV = "%Y-%m-%d %H:%M:%S" EXPECTED_LENGTH_OF_DATE = 19 +FALLBACK_DATE_FORMAT_FOR_CSV = "%Y-%m-%d" def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 @@ -85,6 +86,7 @@ def pd_readcsv( filename: str, date_index_name: str = "DATETIME", date_format: str = DEFAULT_DATE_FORMAT_FOR_CSV, + fallback_date_format: str = FALLBACK_DATE_FORMAT_FOR_CSV, input_column_mapping: Union[dict, named_object] = arg_not_supplied, skiprows: int = 0, skipfooter: int = 0, @@ -106,9 +108,14 @@ def pd_readcsv( df = pd.read_csv(filename, skiprows=skiprows, skipfooter=skipfooter) ## Add time index as index - df = add_datetime_index( - df=df, date_index_name=date_index_name, date_format=date_format - ) + try: + df = add_datetime_index( + df=df, date_index_name=date_index_name, date_format=date_format + ) + except: + df =add_datetime_index( + df=df, date_index_name=date_index_name, date_format=fallback_date_format + ) if input_column_mapping is not arg_not_supplied: df = remap_columns_in_pd(df, input_column_mapping) @@ -128,7 +135,7 @@ def add_datetime_index( def left(x: str, n): return x[:n] - date_index = date_index.apply(left, n=EXPECTED_LENGTH_OF_DATE) + date_index = date_index.apply(left, n=expected_length_of_date) df.index = pd.to_datetime(date_index, format=date_format).values del df[date_index_name] df.index.name = None diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index e5a557b485..dbff988846 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -13,6 +13,7 @@ assetClassesAndInstruments, futuresInstrumentWithMetaData, ) +from syscore.exceptions import missingData from sysobjects.spot_fx_prices import fxPrices from sysobjects.adjusted_prices import futuresAdjustedPrices from sysobjects.multiple_prices import futuresMultiplePrices @@ -70,6 +71,11 @@ def get_multiple_prices_from_start_date( self, instrument_code: str, start_date ) -> futuresMultiplePrices: data = self.db_futures_multiple_prices_data.get_multiple_prices(instrument_code) + if len(data)==0: + raise missingData( + "Data for %s not found! Remove from instrument list, or add to config.ignore_instruments" + % instrument_code + ) return data[start_date:] From e7003116134c78e6db7e878724eab09bbd606634 Mon Sep 17 00:00:00 2001 From: Mendel Friedman <77807315+meldinman@users.noreply.github.com> Date: Mon, 4 Dec 2023 19:38:45 -0500 Subject: [PATCH 170/235] Update strategy_functions.py --- syscore/pandas/strategy_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syscore/pandas/strategy_functions.py b/syscore/pandas/strategy_functions.py index 548bdcf890..8698b6cfcc 100644 --- a/syscore/pandas/strategy_functions.py +++ b/syscore/pandas/strategy_functions.py @@ -165,7 +165,7 @@ def calculate_cost_deflator(price: pd.Series) -> pd.Series: daily_returns = price_to_daily_returns(price) ## crude but doesn't matter vol_price = daily_returns.rolling(180, min_periods=3).std().ffill() - final_vol = vol_price[-1] + final_vol = vol_price.iloc[-1] cost_scalar = vol_price / final_vol From 3558d234a1f557a798f95335a76dcd0b05bbd30b Mon Sep 17 00:00:00 2001 From: Mendel Friedman <77807315+meldinman@users.noreply.github.com> Date: Tue, 5 Dec 2023 21:07:22 -0500 Subject: [PATCH 171/235] Update vol.py can now forward and backfill in chained method --- sysquant/estimators/vol.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index fafba60b87..41bf4b1b44 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -113,10 +113,7 @@ def backfill_vol(vol: pd.Series) -> pd.Series: # have to fill forwards first, as it's only the start we want to # backfill, eg before any value available - vol_forward_fill = vol.fillna(method="ffill") - vol_backfilled = vol_forward_fill.fillna(method="bfill") - - return vol_backfilled + return vol.ffill().bfill() def mixed_vol_calc( From 30eab73e845439e8d975d47f241549ab33c15cc9 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 6 Dec 2023 09:12:46 +0000 Subject: [PATCH 172/235] remove future warning --- sysquant/estimators/vol.py | 2 +- systems/rawdata.py | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index fafba60b87..dcad21ef31 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -94,7 +94,7 @@ def apply_vol_floor( floor_days: int = 500, ) -> pd.Series: # Find the rolling 5% quantile point to set as a minimum - vol_min = vol.rolling(min_periods=floor_min_periods, window=floor_days).quantile( + vol_min = vol.rolling(min_periods=floor_min_periods, window=floor_days).q( quantile=floor_min_quant ) diff --git a/systems/rawdata.py b/systems/rawdata.py index c0786d87f3..27afc40c0f 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -345,6 +345,7 @@ def _aggregate_daily_vol_normalised_returns_for_list_of_instruments( def _daily_vol_normalised_price_for_list_of_instruments( self, list_of_instruments: list ) -> pd.Series: + norm_returns = ( self._aggregate_daily_vol_normalised_returns_for_list_of_instruments( list_of_instruments @@ -373,6 +374,36 @@ def _by_asset_class_daily_vol_normalised_price_for_asset_class( return norm_price + @diagnostic() + def daily_vol_normalised_price_for_asset_class_with_redundant_instrument_code( + self, instrument_code: str, asset_class: str + ) -> pd.Series: + """ + Price for an asset class, built up from cumulative returns + + :param asset_class: str + :return: pd.Series + """ + + return self._by_asset_class_daily_vol_normalised_price_for_asset_class(asset_class) + + @diagnostic() + def system_with_redundant_instrument_code_passed( + self,instrument_code: str, asset_class: str + ): + ## allows ultimate flexibility when creating trading rules but be careful! + + return self.parent + + @diagnostic() + def instrument_code( + self,instrument_code: str + ) -> pd.Series: + ## allows ultimate flexibility when creating trading rules + + return instrument_code + + @output() def normalised_price_for_asset_class(self, instrument_code: str) -> pd.Series: """ @@ -702,4 +733,4 @@ def instrument_list(self) -> list: if __name__ == "__main__": import doctest - doctest.testmod() + doctest.testmod() \ No newline at end of file From 7bf683a1d41abe27f9d18766a0e1a7d265cb15c3 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 6 Dec 2023 09:37:43 +0000 Subject: [PATCH 173/235] remove future warning properly speed up percentage calculation --- sysquant/estimators/vol.py | 4 ++-- systems/accounts/pandl_calculators/pandl_calculation.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index dcad21ef31..7ff5a06031 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -94,8 +94,8 @@ def apply_vol_floor( floor_days: int = 500, ) -> pd.Series: # Find the rolling 5% quantile point to set as a minimum - vol_min = vol.rolling(min_periods=floor_min_periods, window=floor_days).q( - quantile=floor_min_quant + vol_min = vol.rolling(min_periods=floor_min_periods, window=floor_days).quantile( + q=floor_min_quant ) # set this to zero for the first value then propagate forward, ensures diff --git a/systems/accounts/pandl_calculators/pandl_calculation.py b/systems/accounts/pandl_calculators/pandl_calculation.py index c21099b0ea..ebf1ced812 100644 --- a/systems/accounts/pandl_calculators/pandl_calculation.py +++ b/systems/accounts/pandl_calculators/pandl_calculation.py @@ -90,7 +90,10 @@ def percentage_pandl(self) -> pd.Series: def _percentage_pandl_given_pandl(self, pandl_in_base: pd.Series): capital = self.capital - capital_aligned = capital.reindex(pandl_in_base.index, method="ffill") + if type(capital) is pd.Series: + capital_aligned = capital.reindex(pandl_in_base.index, method="ffill") + elif type(capital) is float or type(capital) is int: + capital_aligned = capital return 100.0 * pandl_in_base / capital_aligned From 146360372de950f051f30f8ee2e2f6e24965a67c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Dec 2023 15:32:52 +0000 Subject: [PATCH 174/235] fix project name --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7b41bd4152..c4a529cf2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "pysystemtrader" +name = "pysystemtrade" version = "1.80" authors = [ { name="Robert Carver", email="rob@systematicmoney.org" }, From 3644c73b0fb508e433ca1099633512c35fce455f Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Dec 2023 15:33:16 +0000 Subject: [PATCH 175/235] black --- syscore/pandas/pdutils.py | 3 ++- sysdata/sim/futures_sim_data_with_data_blob.py | 2 +- systems/rawdata.py | 14 ++++++-------- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 24f06f81fe..272af80866 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -15,6 +15,7 @@ FALLBACK_DATE_FORMAT_FOR_CSV = "%Y-%m-%d" + def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 ) -> pd.Series: @@ -113,7 +114,7 @@ def pd_readcsv( df=df, date_index_name=date_index_name, date_format=date_format ) except: - df =add_datetime_index( + df = add_datetime_index( df=df, date_index_name=date_index_name, date_format=fallback_date_format ) diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index dbff988846..0e0f5f1193 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -71,7 +71,7 @@ def get_multiple_prices_from_start_date( self, instrument_code: str, start_date ) -> futuresMultiplePrices: data = self.db_futures_multiple_prices_data.get_multiple_prices(instrument_code) - if len(data)==0: + if len(data) == 0: raise missingData( "Data for %s not found! Remove from instrument list, or add to config.ignore_instruments" % instrument_code diff --git a/systems/rawdata.py b/systems/rawdata.py index 27afc40c0f..feebebaeee 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -345,7 +345,6 @@ def _aggregate_daily_vol_normalised_returns_for_list_of_instruments( def _daily_vol_normalised_price_for_list_of_instruments( self, list_of_instruments: list ) -> pd.Series: - norm_returns = ( self._aggregate_daily_vol_normalised_returns_for_list_of_instruments( list_of_instruments @@ -385,25 +384,24 @@ def daily_vol_normalised_price_for_asset_class_with_redundant_instrument_code( :return: pd.Series """ - return self._by_asset_class_daily_vol_normalised_price_for_asset_class(asset_class) + return self._by_asset_class_daily_vol_normalised_price_for_asset_class( + asset_class + ) @diagnostic() def system_with_redundant_instrument_code_passed( - self,instrument_code: str, asset_class: str + self, instrument_code: str, asset_class: str ): ## allows ultimate flexibility when creating trading rules but be careful! return self.parent @diagnostic() - def instrument_code( - self,instrument_code: str - ) -> pd.Series: + def instrument_code(self, instrument_code: str) -> pd.Series: ## allows ultimate flexibility when creating trading rules return instrument_code - @output() def normalised_price_for_asset_class(self, instrument_code: str) -> pd.Series: """ @@ -733,4 +731,4 @@ def instrument_list(self) -> list: if __name__ == "__main__": import doctest - doctest.testmod() \ No newline at end of file + doctest.testmod() From 235d47b6d8f03197e29cc130596f10532c78e7ab Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Dec 2023 18:40:14 +0000 Subject: [PATCH 176/235] black --- syscore/pandas/pdutils.py | 3 ++- sysdata/sim/futures_sim_data_with_data_blob.py | 2 +- systems/rawdata.py | 14 ++++++-------- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 24f06f81fe..272af80866 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -15,6 +15,7 @@ FALLBACK_DATE_FORMAT_FOR_CSV = "%Y-%m-%d" + def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 ) -> pd.Series: @@ -113,7 +114,7 @@ def pd_readcsv( df=df, date_index_name=date_index_name, date_format=date_format ) except: - df =add_datetime_index( + df = add_datetime_index( df=df, date_index_name=date_index_name, date_format=fallback_date_format ) diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index dbff988846..0e0f5f1193 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -71,7 +71,7 @@ def get_multiple_prices_from_start_date( self, instrument_code: str, start_date ) -> futuresMultiplePrices: data = self.db_futures_multiple_prices_data.get_multiple_prices(instrument_code) - if len(data)==0: + if len(data) == 0: raise missingData( "Data for %s not found! Remove from instrument list, or add to config.ignore_instruments" % instrument_code diff --git a/systems/rawdata.py b/systems/rawdata.py index 27afc40c0f..feebebaeee 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -345,7 +345,6 @@ def _aggregate_daily_vol_normalised_returns_for_list_of_instruments( def _daily_vol_normalised_price_for_list_of_instruments( self, list_of_instruments: list ) -> pd.Series: - norm_returns = ( self._aggregate_daily_vol_normalised_returns_for_list_of_instruments( list_of_instruments @@ -385,25 +384,24 @@ def daily_vol_normalised_price_for_asset_class_with_redundant_instrument_code( :return: pd.Series """ - return self._by_asset_class_daily_vol_normalised_price_for_asset_class(asset_class) + return self._by_asset_class_daily_vol_normalised_price_for_asset_class( + asset_class + ) @diagnostic() def system_with_redundant_instrument_code_passed( - self,instrument_code: str, asset_class: str + self, instrument_code: str, asset_class: str ): ## allows ultimate flexibility when creating trading rules but be careful! return self.parent @diagnostic() - def instrument_code( - self,instrument_code: str - ) -> pd.Series: + def instrument_code(self, instrument_code: str) -> pd.Series: ## allows ultimate flexibility when creating trading rules return instrument_code - @output() def normalised_price_for_asset_class(self, instrument_code: str) -> pd.Series: """ @@ -733,4 +731,4 @@ def instrument_list(self) -> list: if __name__ == "__main__": import doctest - doctest.testmod() \ No newline at end of file + doctest.testmod() From 9845c237e09067cedb9b3833fefe943f258fdb2f Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Dec 2023 18:42:36 +0000 Subject: [PATCH 177/235] refactor away uses of log_with_attributes() in the algo code --- sysexecution/algos/algo.py | 16 +- sysexecution/algos/algo_limit_orders.py | 1 - sysexecution/algos/algo_market.py | 25 +- sysexecution/algos/algo_original_best.py | 518 ++++++++++++----------- sysexecution/algos/common_functions.py | 36 -- 5 files changed, 311 insertions(+), 285 deletions(-) diff --git a/sysexecution/algos/algo.py b/sysexecution/algos/algo.py index 867f214e01..d3e3639222 100644 --- a/sysexecution/algos/algo.py +++ b/sysexecution/algos/algo.py @@ -1,7 +1,7 @@ from copy import copy from dataclasses import dataclass -from syscore.exceptions import missingContract, missingData +from syscore.exceptions import missingContract, missingData, orderCannotBeModified from syscore.constants import arg_not_supplied from sysexecution.orders.named_order_objects import missing_order @@ -242,3 +242,17 @@ def round_limit_price_to_tick_size( rounded_limit_price = min_tick * round(limit_price / min_tick) return rounded_limit_price + + def file_log_report_market_order( + self, broker_order_with_controls: orderWithControls + ): + ticker_object = broker_order_with_controls.ticker + current_tick = str(ticker_object.current_tick()) + + log_report = "Market order execution current tick %s" % current_tick + + self.data.log.debug( + log_report, + **broker_order_with_controls.order.log_attributes(), + method="temp", + ) diff --git a/sysexecution/algos/algo_limit_orders.py b/sysexecution/algos/algo_limit_orders.py index 7650d0a331..9a5ba081ca 100644 --- a/sysexecution/algos/algo_limit_orders.py +++ b/sysexecution/algos/algo_limit_orders.py @@ -10,7 +10,6 @@ post_trade_processing, MESSAGING_FREQUENCY, cancel_order, - file_log_report_market_order, ) from sysdata.data_blob import dataBlob from sysexecution.orders.contract_orders import contractOrder diff --git a/sysexecution/algos/algo_market.py b/sysexecution/algos/algo_market.py index 2f9dc7a7c5..f0f0d3431a 100644 --- a/sysexecution/algos/algo_market.py +++ b/sysexecution/algos/algo_market.py @@ -10,7 +10,6 @@ post_trade_processing, MESSAGING_FREQUENCY, cancel_order, - file_log_report_market_order, ) from sysexecution.order_stacks.broker_order_stack import orderWithControls from sysexecution.orders.broker_orders import market_order_type, brokerOrderType @@ -82,14 +81,17 @@ def order_type_to_use(self) -> brokerOrderType: def manage_live_trade( self, broker_order_with_controls: orderWithControls ) -> orderWithControls: - # TODO log_with_attributes - log = broker_order_with_controls.order.log_with_attributes(self.data.log) + log_attrs = { + **broker_order_with_controls.order.log_attributes(), + "method": "temp", + } data_broker = self.data_broker trade_open = True - log.debug( + self.data.log.debug( "Managing trade %s with market order" - % str(broker_order_with_controls.order) + % str(broker_order_with_controls.order), + **log_attrs, ) while trade_open: time.sleep(0.001) @@ -97,7 +99,7 @@ def manage_live_trade( messaging_frequency_seconds=MESSAGING_FREQUENCY ) if log_message_required: - file_log_report_market_order(log, broker_order_with_controls) + self.file_log_report_market_order(broker_order_with_controls) is_order_completed = broker_order_with_controls.completed() is_order_timeout = ( @@ -110,19 +112,22 @@ def manage_live_trade( ) ) if is_order_completed: - log.debug("Trade completed") + self.data.log.debug("Trade completed", **log_attrs) break if is_order_timeout: - log.debug("Run out of time to execute: cancelling") + self.data.log.debug( + "Run out of time to execute: cancelling", **log_attrs + ) broker_order_with_controls = cancel_order( self.data, broker_order_with_controls ) break if is_order_cancelled: - log.warning( - "Order has been cancelled apparently by broker: not by algo!" + self.data.log.warning( + "Order has been cancelled apparently by broker: not by algo!", + **log_attrs, ) break diff --git a/sysexecution/algos/algo_original_best.py b/sysexecution/algos/algo_original_best.py index 085e067ebb..0326be47a0 100644 --- a/sysexecution/algos/algo_original_best.py +++ b/sysexecution/algos/algo_original_best.py @@ -1,24 +1,25 @@ """ This is the original 'best execution' algo I used in my legacy system """ -from typing import Union + import time -from syscore.exceptions import missingData, marketClosed +from syscore.exceptions import missingData, marketClosed, orderCannotBeModified from sysexecution.orders.named_order_objects import missing_order -from sysdata.data_blob import dataBlob -from sysexecution.algos.algo import Algo, limit_price_from_offside_price +from sysexecution.algos.algo import ( + Algo, + limit_price_from_offside_price, +) from sysexecution.algos.common_functions import ( post_trade_processing, MESSAGING_FREQUENCY, cancel_order, - set_limit_price, check_current_limit_price_at_inside_spread, - file_log_report_market_order, limit_price_is_at_inside_spread, ) from sysexecution.tick_data import tickerObject, analysisTick from sysexecution.order_stacks.broker_order_stack import orderWithControls +from sysexecution.orders.base_orders import Order from sysexecution.orders.broker_orders import ( market_order_type, limit_order_type, @@ -28,7 +29,6 @@ from syslogging.logger import * -from sysproduction.data.broker import dataBroker # Here are the algo parameters # Hard coded; if you want to try different parameters make a hard copy and @@ -42,14 +42,16 @@ HOURS_BEFORE_MARKET_CLOSE_TO_SWITCH_TO_MARKET = 0.5 # imbalance -# if more than 5 times on the bid (if we're buying) than the offer, AND less than three times our quantity on the offer, -# then go aggressive +# if more than 5 times on the bid (if we're buying) than the offer, AND less than +# three times our quantity on the offer, then go aggressive IMBALANCE_THRESHOLD = 5 IMBALANCE_ADJ_FACTOR = 3 # we only do one contract at a time SIZE_LIMIT = 1 +no_need_to_switch = "_NO_NEED_TO_SWITCH" + class algoOriginalBest(Algo): """ @@ -82,14 +84,14 @@ def manage_trade( def prepare_and_submit_trade(self) -> orderWithControls: data = self.data contract_order = self.contract_order - # TODO log_with_attributes - log = contract_order.log_with_attributes(data.log) + log_attrs = {**contract_order.log_attributes(), "method": "temp"} ## check order type is 'best' not 'limit' or 'market' if not contract_order.order_type == best_order_type: - log.critical( + data.log.critical( "Order has been allocated to algo 'original-best' but order type is %s" - % str(contract_order.order_type) + % str(contract_order.order_type), + **log_attrs, ) return missing_order @@ -99,20 +101,19 @@ def prepare_and_submit_trade(self) -> orderWithControls: ) ) if cut_down_contract_order.trade != contract_order.trade: - log.debug( + data.log.debug( "Cut down order to size %s from %s because of algo size limit" - % (str(contract_order.trade), str(cut_down_contract_order.trade)) + % (str(contract_order.trade), str(cut_down_contract_order.trade)), + **log_attrs, ) ticker_object = self.data_broker.get_ticker_object_for_order( cut_down_contract_order ) try: - okay_to_do_limit_trade = limit_trade_viable( + okay_to_do_limit_trade = self.limit_trade_viable( ticker_object=ticker_object, - data_broker=self.data_broker, order=cut_down_contract_order, - log=log, ) except missingData: ## Safer not to trade at all @@ -130,8 +131,9 @@ def prepare_and_submit_trade(self) -> orderWithControls: ) else: # do a market order - log.debug( - "Conditions are wrong so doing market trade instead of limit trade" + data.log.debug( + "Conditions are wrong so doing market trade instead of limit trade", + **log_attrs, ) broker_order_with_controls = ( self.get_and_submit_broker_order_for_contract_order( @@ -141,296 +143,338 @@ def prepare_and_submit_trade(self) -> orderWithControls: return broker_order_with_controls - def manage_live_trade( - self, broker_order_with_controls_and_order_id: orderWithControls - ) -> orderWithControls: + def manage_live_trade(self, order_control: orderWithControls) -> orderWithControls: data = self.data - # TODO log_with_attributes - log = broker_order_with_controls_and_order_id.order.log_with_attributes( - data.log - ) + log_attrs = { + **order_control.order.log_attributes(), + "method": "temp", + } trade_open = True is_aggressive = False - log.debug( - "Managing trade %s with algo 'original-best'" - % str(broker_order_with_controls_and_order_id.order) + data.log.debug( + "Managing trade %s with algo 'original-best'" % str(order_control.order), + **log_attrs, ) - is_limit_trade = ( - broker_order_with_controls_and_order_id.order.order_type == limit_order_type - ) + is_limit_trade = order_control.order.order_type == limit_order_type while trade_open: time.sleep(0.001) - if broker_order_with_controls_and_order_id.message_required( + if order_control.message_required( messaging_frequency_seconds=MESSAGING_FREQUENCY ): - file_log_report( - log, is_aggressive, broker_order_with_controls_and_order_id - ) + self.file_log_report(is_aggressive, order_control) if is_limit_trade: if is_aggressive: ## aggressive keep limit price in line - set_aggressive_limit_price( - data_broker=self.data_broker, - broker_order_with_controls=broker_order_with_controls_and_order_id, + self.set_aggressive_limit_price( + broker_order_with_controls=order_control, ) else: # passive limit trade - reason_to_switch = reason_to_switch_to_aggressive( - data_broker=self.data_broker, - broker_order_with_controls=broker_order_with_controls_and_order_id, - log=log, + reason_to_switch = self.reason_to_switch_to_aggressive( + broker_order_with_controls=order_control, + ) + need_to_switch = self.required_to_switch_to_aggressive( + reason_to_switch ) - need_to_switch = required_to_switch_to_aggressive(reason_to_switch) if need_to_switch: - log.debug("Switch to aggressive because %s" % reason_to_switch) + data.log.debug( + "Switch to aggressive because %s" % reason_to_switch, + **log_attrs, + ) is_aggressive = True else: # market trade nothing to do pass - order_completed = broker_order_with_controls_and_order_id.completed() + order_completed = order_control.completed() if order_completed: - log.debug("Trade completed") + data.log.debug("Trade completed", **log_attrs) break - order_timeout = ( - broker_order_with_controls_and_order_id.seconds_since_submission() - > TOTAL_TIME_OUT - ) + order_timeout = order_control.seconds_since_submission() > TOTAL_TIME_OUT if order_timeout: - log.debug("Run out of time: cancelling") - broker_order_with_controls_and_order_id = cancel_order( - data, broker_order_with_controls_and_order_id + data.log.debug( + "Run out of time: cancelling", + **log_attrs, ) + order_control = cancel_order(data, order_control) break order_cancelled = ( self.data_broker.check_order_is_cancelled_given_control_object( - broker_order_with_controls_and_order_id + order_control ) ) if order_cancelled: - log.warning("Order has been cancelled: not by algo") + data.log.warning("Order has been cancelled: not by algo", **log_attrs) break - return broker_order_with_controls_and_order_id - - -def limit_trade_viable( # TODO passed logger instance - data_broker: dataBroker, - order: contractOrder, - ticker_object: tickerObject, - log, -) -> bool: - # no point doing limit order if we've got imbalanced size issues, as we'd - # switch to aggressive immediately - raise_adverse_size_issue = adverse_size_issue( - ticker_object, wait_for_valid_tick=True, log=log - ) - - if raise_adverse_size_issue: - log.debug("Limit trade not viable") - return False - - # or if not enough time left - if is_market_about_to_close(data_broker=data_broker, order=order, log=log): - log.debug( - "Market about to close or stack handler nearly close - doing market order" + return order_control + + def limit_trade_viable( + self, + order: contractOrder, + ticker_object: tickerObject, + ) -> bool: + log_attrs = {**order.log_attributes(), "method": "temp"} + + # no point doing limit order if we've got imbalanced size issues, as we'd + # switch to aggressive immediately + raise_adverse_size_issue = self.adverse_size_issue( + ticker_object, order, wait_for_valid_tick=True ) - return False - return True + if raise_adverse_size_issue: + self.data.log.debug("Limit trade not viable", **log_attrs) + return False + # or if not enough time left + if self.is_market_about_to_close(order=order): + self.data.log.debug( + "Market about to close or stack handler nearly close - " + "doing market order", + **log_attrs, + ) + return False -no_need_to_switch = "_NO_NEED_TO_SWITCH" + return True + def file_log_report( + self, is_aggressive: bool, broker_order_with_controls: orderWithControls + ): + limit_trade = broker_order_with_controls.order.order_type == limit_order_type + if limit_trade: + self.file_log_report_limit_order(is_aggressive, broker_order_with_controls) + else: + self.file_log_report_market_order(broker_order_with_controls) -def file_log_report( # TODO passed logger instance - log, is_aggressive: bool, broker_order_with_controls: orderWithControls -): - limit_trade = broker_order_with_controls.order.order_type == limit_order_type - if limit_trade: - file_log_report_limit_order(log, is_aggressive, broker_order_with_controls) - else: - file_log_report_market_order(log, broker_order_with_controls) - - -def file_log_report_limit_order( - log, is_aggressive: bool, broker_order_with_controls: orderWithControls -): - if is_aggressive: - agg_txt = "Aggressive" - else: - agg_txt = "Passive" - - limit_price = broker_order_with_controls.order.limit_price - broker_limit_price = broker_order_with_controls.broker_limit_price() - - ticker_object = broker_order_with_controls.ticker - current_tick = str(ticker_object.current_tick()) - - log_report = "%s execution with limit price desired:%f actual:%f last tick %s" % ( - agg_txt, - limit_price, - broker_limit_price, - current_tick, - ) - - log.debug(log_report) - - -def reason_to_switch_to_aggressive( - data_broker: dataBroker, - broker_order_with_controls: orderWithControls, - log, -) -> str: - ticker_object = broker_order_with_controls.ticker - - too_much_time = ( - broker_order_with_controls.seconds_since_submission() > PASSIVE_TIME_OUT - ) - if too_much_time: - return ( - "Time out after %f seconds" - % broker_order_with_controls.seconds_since_submission() + def file_log_report_limit_order( + self, is_aggressive: bool, broker_order_with_controls: orderWithControls + ): + if is_aggressive: + agg_txt = "Aggressive" + else: + agg_txt = "Passive" + + limit_price = broker_order_with_controls.order.limit_price + broker_limit_price = broker_order_with_controls.broker_limit_price() + + ticker_object = broker_order_with_controls.ticker + current_tick = str(ticker_object.current_tick()) + + log_report = ( + "%s execution with limit price desired:%f actual:%f last tick %s" + % ( + agg_txt, + limit_price, + broker_limit_price, + current_tick, + ) ) - market_about_to_close = is_market_about_to_close( - data_broker=data_broker, order=broker_order_with_controls, log=log - ) - if market_about_to_close: - return "Market is closing soon or stack handler will end soon" + self.data.log.debug( + log_report, + **broker_order_with_controls.order.log_attributes(), + method="temp", + ) - try: - adverse_price = ticker_object.adverse_price_movement_vs_reference() - if adverse_price: - return "Adverse price movement" + def reason_to_switch_to_aggressive( + self, + broker_order_with_controls: orderWithControls, + ) -> str: + ticker_object = broker_order_with_controls.ticker - adverse_size = adverse_size_issue( - ticker_object, wait_for_valid_tick=False, log=log + too_much_time = ( + broker_order_with_controls.seconds_since_submission() > PASSIVE_TIME_OUT ) - if adverse_size: + if too_much_time: return ( - "Imbalance ratio of %f exceeds threshold" - % ticker_object.latest_imbalance_ratio() + "Time out after %f seconds" + % broker_order_with_controls.seconds_since_submission() ) - ## everything is fine, stay with aggressive - return no_need_to_switch - - except: - return "Problem with data, switch to aggressive" + market_about_to_close = self.is_market_about_to_close( + order=broker_order_with_controls.order, + ) + if market_about_to_close: + return "Market is closing soon or stack handler will end soon" + try: + adverse_price = ticker_object.adverse_price_movement_vs_reference() + if adverse_price: + return "Adverse price movement" + + adverse_size = self.adverse_size_issue( + ticker_object, + broker_order_with_controls.order, + wait_for_valid_tick=False, + ) + if adverse_size: + return ( + "Imbalance ratio of %f exceeds threshold" + % ticker_object.latest_imbalance_ratio() + ) -def is_market_about_to_close( - data_broker: dataBroker, - order: Union[brokerOrder, contractOrder, orderWithControls], - log, -) -> bool: - try: - short_of_time = data_broker.less_than_N_hours_of_trading_left_for_contract( - order.futures_contract, - N_hours=HOURS_BEFORE_MARKET_CLOSE_TO_SWITCH_TO_MARKET, - ) - except marketClosed: - log.warning("Market has closed for active limit order %s!" % str(order)) - return True + ## everything is fine, stay with aggressive + return no_need_to_switch - return short_of_time + except: + return "Problem with data, switch to aggressive" + def is_market_about_to_close( + self, + order: Union[brokerOrder, contractOrder], + ) -> bool: + try: + short_of_time = ( + self.data_broker.less_than_N_hours_of_trading_left_for_contract( + order.futures_contract, + N_hours=HOURS_BEFORE_MARKET_CLOSE_TO_SWITCH_TO_MARKET, + ) + ) + except marketClosed: + self.data.log.warning( + "Market has closed for active limit order %s!" % str(order), + **order.log_attributes(), + method="temp", + ) + return True -def required_to_switch_to_aggressive(reason: str) -> bool: - if reason == no_need_to_switch: - return False - else: - return True + return short_of_time + @staticmethod + def required_to_switch_to_aggressive(reason: str) -> bool: + if reason == no_need_to_switch: + return False + else: + return True + + def adverse_size_issue( + self, ticker_object: tickerObject, order: Order, wait_for_valid_tick=False + ) -> bool: + if wait_for_valid_tick: + current_tick_analysis = ( + ticker_object.wait_for_valid_bid_and_ask_and_analyse_current_tick() + ) + else: + current_tick_analysis = ticker_object.current_tick_analysis -def adverse_size_issue( - ticker_object: tickerObject, log, wait_for_valid_tick=False -) -> bool: - if wait_for_valid_tick: - current_tick_analysis = ( - ticker_object.wait_for_valid_bid_and_ask_and_analyse_current_tick() + latest_imbalance_ratio_exceeded = self._is_imbalance_ratio_exceeded( + current_tick_analysis, order ) - else: - current_tick_analysis = ticker_object.current_tick_analysis - - latest_imbalance_ratio_exceeded = _is_imbalance_ratio_exceeded( - current_tick_analysis, log=log - ) - insufficient_size_on_our_preferred_side = ( - _is_insufficient_size_on_our_preferred_side( - ticker_object, current_tick_analysis, log=log + insufficient_size_on_our_preferred_side = ( + self._is_insufficient_size_on_our_preferred_side( + ticker_object, current_tick_analysis, order + ) ) - ) - if latest_imbalance_ratio_exceeded and insufficient_size_on_our_preferred_side: - return True - else: - return False - - -def _is_imbalance_ratio_exceeded(current_tick_analysis: analysisTick, log) -> bool: - latest_imbalance_ratio = current_tick_analysis.imbalance_ratio - latest_imbalance_ratio_exceeded = latest_imbalance_ratio > IMBALANCE_THRESHOLD + if latest_imbalance_ratio_exceeded and insufficient_size_on_our_preferred_side: + return True + else: + return False + + def _is_imbalance_ratio_exceeded( + self, current_tick_analysis: analysisTick, order: Order + ) -> bool: + latest_imbalance_ratio = current_tick_analysis.imbalance_ratio + latest_imbalance_ratio_exceeded = latest_imbalance_ratio > IMBALANCE_THRESHOLD + + if latest_imbalance_ratio_exceeded: + self.data.log.debug( + "Imbalance ratio for ticker %s %f exceeds threshold %f" + % ( + str(current_tick_analysis), + latest_imbalance_ratio, + IMBALANCE_THRESHOLD, + ), + **order.log_attributes(), + method="temp", + ) - if latest_imbalance_ratio_exceeded: - log.debug( - "Imbalance ratio for ticker %s %f exceeds threshold %f" - % (str(current_tick_analysis), latest_imbalance_ratio, IMBALANCE_THRESHOLD) + return latest_imbalance_ratio_exceeded + + def _is_insufficient_size_on_our_preferred_side( + self, + ticker_object: tickerObject, + current_tick_analysis: analysisTick, + order: Order, + ) -> bool: + abs_size_we_wish_to_trade = abs(ticker_object.qty) + size_we_require_to_trade_limit = ( + IMBALANCE_ADJ_FACTOR * abs_size_we_wish_to_trade ) + available_size_on_our_preferred_side = abs(current_tick_analysis.side_qty) - return latest_imbalance_ratio_exceeded + insufficient_size_on_our_preferred_side = ( + available_size_on_our_preferred_side < size_we_require_to_trade_limit + ) + if insufficient_size_on_our_preferred_side: + self.data.log.debug( + "On ticker %s we require size of %f (our trade %f * adjustment %f) " + "for a limit order but only %f available" + % ( + str(current_tick_analysis), + size_we_require_to_trade_limit, + abs_size_we_wish_to_trade, + IMBALANCE_ADJ_FACTOR, + available_size_on_our_preferred_side, + ), + **order.log_attributes(), + method="temp", + ) -def _is_insufficient_size_on_our_preferred_side( - ticker_object: tickerObject, current_tick_analysis: analysisTick, log -) -> bool: - abs_size_we_wish_to_trade = abs(ticker_object.qty) - size_we_require_to_trade_limit = IMBALANCE_ADJ_FACTOR * abs_size_we_wish_to_trade - available_size_on_our_preferred_side = abs(current_tick_analysis.side_qty) + return insufficient_size_on_our_preferred_side - insufficient_size_on_our_preferred_side = ( - available_size_on_our_preferred_side < size_we_require_to_trade_limit - ) + def set_aggressive_limit_price( + self, broker_order_with_controls: orderWithControls + ) -> orderWithControls: + limit_trade = broker_order_with_controls.order.order_type == limit_order_type + if not limit_trade: + # market trade, don't bother + return broker_order_with_controls - if insufficient_size_on_our_preferred_side: - log.debug( - "On ticker %s we require size of %f (our trade %f * adjustment %f) for a limit order but only %f available" - % ( - str(current_tick_analysis), - size_we_require_to_trade_limit, - abs_size_we_wish_to_trade, - IMBALANCE_ADJ_FACTOR, - available_size_on_our_preferred_side, - ) + new_limit_price = check_current_limit_price_at_inside_spread( + broker_order_with_controls ) + if new_limit_price is limit_price_is_at_inside_spread: + pass + else: + broker_order_with_controls = self.set_best_limit_price( + broker_order_with_controls, new_limit_price + ) - return insufficient_size_on_our_preferred_side - - -def set_aggressive_limit_price( - data_broker: dataBroker, broker_order_with_controls: orderWithControls -) -> orderWithControls: - limit_trade = broker_order_with_controls.order.order_type == limit_order_type - if not limit_trade: - # market trade, don't bother return broker_order_with_controls - new_limit_price = check_current_limit_price_at_inside_spread( - broker_order_with_controls - ) - if new_limit_price is limit_price_is_at_inside_spread: - pass - else: - broker_order_with_controls = set_limit_price( - data_broker, broker_order_with_controls, new_limit_price - ) + def set_best_limit_price( + self, + broker_order_with_controls: orderWithControls, + new_limit_price: float, + ): + log_attrs = { + **broker_order_with_controls.order.log_attributes(), + "method": "temp", + } - return broker_order_with_controls + try: + broker_order_with_controls = ( + self.data_broker.modify_limit_price_given_control_object( + broker_order_with_controls, new_limit_price + ) + ) + self.data.log.debug( + "Tried to change limit price to %f" % new_limit_price, + **log_attrs, + ) + except orderCannotBeModified as error: + self.data.log.debug( + "Can't modify limit price for order, error %s" % str(error), + **log_attrs, + ) + + return broker_order_with_controls diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index 44c60600e6..0ac8f7ff71 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -59,32 +59,6 @@ def cancel_order( return broker_order_with_controls -def set_limit_price( - data_broker: dataBroker, - broker_order_with_controls: orderWithControls, - new_limit_price: float, -): - log_attrs = {**broker_order_with_controls.order.log_attributes(), "method": "temp"} - - try: - broker_order_with_controls = ( - data_broker.modify_limit_price_given_control_object( - broker_order_with_controls, new_limit_price - ) - ) - data_broker.data.log.debug( - "Tried to change limit price to %f" % new_limit_price, - **log_attrs, - ) - except orderCannotBeModified as error: - data_broker.data.log.debug( - "Can't modify limit price for order, error %s" % str(error), - **log_attrs, - ) - - return broker_order_with_controls - - limit_price_is_at_inside_spread = -99999999999999.99 @@ -110,13 +84,3 @@ def check_current_limit_price_at_inside_spread( new_limit_price = current_side_price return new_limit_price - - -# TODO passed logger instance -def file_log_report_market_order(log, broker_order_with_controls: orderWithControls): - ticker_object = broker_order_with_controls.ticker - current_tick = str(ticker_object.current_tick()) - - log_report = "Market order execution current tick %s" % current_tick - - log.debug(log_report) From 99e690d6da7faabdec9be0ea3522a01634bff00d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 6 Dec 2023 22:34:24 +0000 Subject: [PATCH 178/235] refactoring away specific_log() --- sysbrokers/IB/client/ib_fx_client.py | 9 +- sysbrokers/IB/client/ib_price_client.py | 149 ++++++++++++------------ syslogging/adapter.py | 1 - sysobjects/contracts.py | 10 -- 4 files changed, 77 insertions(+), 92 deletions(-) diff --git a/sysbrokers/IB/client/ib_fx_client.py b/sysbrokers/IB/client/ib_fx_client.py index b39abfffdd..c6ca1c20a5 100644 --- a/sysbrokers/IB/client/ib_fx_client.py +++ b/sysbrokers/IB/client/ib_fx_client.py @@ -76,20 +76,21 @@ def broker_get_daily_fx_data( """ ccy_code = ccy1 + ccy2 - # TODO log.setup - log = self.log.setup(currency_code=ccy_code) + self.log.debug("Updating log attributes", currency_code=ccy_code) try: ibcontract = self.ib_spotfx_contract(ccy1, ccy2=ccy2) except missingContract: - log.warning("Can't find IB contract for %s%s" % (ccy1, ccy2)) + self.log.warning("Can't find IB contract for %s%s" % (ccy1, ccy2)) raise missingData # uses parent class ibClientPrices fx_data = self._get_generic_data_for_contract( - ibcontract, log=log, bar_freq=bar_freq, whatToShow="MIDPOINT" + ibcontract, bar_freq=bar_freq, whatToShow="MIDPOINT" ) + self.log.debug("Log attributes reset", method="clear") + return fx_data def ib_spotfx_contract(self, ccy1, ccy2="USD") -> Forex: diff --git a/sysbrokers/IB/client/ib_price_client.py b/sysbrokers/IB/client/ib_price_client.py index 0ba2d245a0..88f3d44e82 100644 --- a/sysbrokers/IB/client/ib_price_client.py +++ b/sysbrokers/IB/client/ib_price_client.py @@ -49,24 +49,28 @@ def broker_get_historical_futures_data_for_contract( :param freq: str; one of D, H, 5M, M, 10S, S :return: futuresContractPriceData """ - # TODO specific_log - specific_log = contract_object_with_ib_broker_config.specific_log(self.log) + self.log.debug( + "Updating log attributes", + **contract_object_with_ib_broker_config.log_attributes(), + ) try: ibcontract = self.ib_futures_contract( contract_object_with_ib_broker_config, allow_expired=allow_expired ) except missingContract: - specific_log.warning( + self.log.warning( "Can't resolve IB contract %s" % str(contract_object_with_ib_broker_config) ) raise missingData price_data = self._get_generic_data_for_contract( - ibcontract, log=specific_log, bar_freq=bar_freq, whatToShow=whatToShow + ibcontract, bar_freq=bar_freq, whatToShow=whatToShow ) + self.log.debug("Log attributes reset", method="clear") + return price_data def get_ticker_object_with_BS( @@ -173,10 +177,9 @@ def _ib_get_recent_bid_ask_tick_data_using_reqHistoricalTicks( return tick_data - def _get_generic_data_for_contract( # TODO passed logger instance + def _get_generic_data_for_contract( self, ibcontract: ibContract, - log=None, bar_freq: Frequency = DAILY_PRICE_FREQ, whatToShow: str = "TRADES", ) -> pd.DataFrame: @@ -187,15 +190,13 @@ def _get_generic_data_for_contract( # TODO passed logger instance :param freq: str; one of D, H, 5M, M, 10S, S :return: futuresContractPriceData """ - if log is None: - log = self.log try: - barSizeSetting, durationStr = _get_barsize_and_duration_from_frequency( + barSizeSetting, durationStr = self._get_barsize_and_duration_from_frequency( bar_freq ) except Exception as exception: - log.warning(exception) + self.log.warning(exception) raise missingData price_data_raw = self._ib_get_historical_data_of_duration_and_barSize( @@ -203,18 +204,17 @@ def _get_generic_data_for_contract( # TODO passed logger instance durationStr=durationStr, barSizeSetting=barSizeSetting, whatToShow=whatToShow, - log=log, ) price_data_as_df = self._raw_ib_data_to_df( - price_data_raw=price_data_raw, log=log + price_data_raw=price_data_raw, ) return price_data_as_df - def _raw_ib_data_to_df(self, price_data_raw: pd.DataFrame, log) -> pd.DataFrame: + def _raw_ib_data_to_df(self, price_data_raw: pd.DataFrame) -> pd.DataFrame: if price_data_raw is None: - log.warning("No price data from IB") + self.log.warning("No price data from IB") raise missingData price_data_as_df = price_data_raw[["open", "high", "low", "close", "volume"]] @@ -265,7 +265,6 @@ def _ib_get_historical_data_of_duration_and_barSize( durationStr: str = "1 Y", barSizeSetting: str = "1 day", whatToShow="TRADES", - log=None, ) -> pd.DataFrame: """ Returns historical prices for a contract, up to today @@ -273,11 +272,8 @@ def _ib_get_historical_data_of_duration_and_barSize( :returns list of prices in 4 tuples: Open high low close volume """ - if log is None: - log = self.log - last_call = self.last_historic_price_calltime - _avoid_pacing_violation(last_call, log=log) + self._avoid_pacing_violation(last_call) ## If live data is available a request for delayed data would be ignored by TWS. self.ib.reqMarketDataType(3) @@ -297,65 +293,64 @@ def _ib_get_historical_data_of_duration_and_barSize( return df - -def _get_barsize_and_duration_from_frequency(bar_freq: Frequency) -> (str, str): - barsize_lookup = dict( - [ - (Frequency.Day, "1 day"), - (Frequency.Hour, "1 hour"), - (Frequency.Minutes_15, "15 mins"), - (Frequency.Minutes_5, "5 mins"), - (Frequency.Minute, "1 min"), - (Frequency.Seconds_10, "10 secs"), - (Frequency.Second, "1 secs"), - ] - ) - - duration_lookup = dict( - [ - (Frequency.Day, "1 Y"), - (Frequency.Hour, "1 M"), - (Frequency.Minutes_15, "1 W"), - (Frequency.Minutes_5, "1 W"), - (Frequency.Minute, "1 D"), - (Frequency.Seconds_10, "14400 S"), - (Frequency.Second, "1800 S"), - ] - ) - try: - assert bar_freq in barsize_lookup.keys() - assert bar_freq in duration_lookup.keys() - except: - raise Exception( - "Barsize %s not recognised should be one of %s" - % (str(bar_freq), str(barsize_lookup.keys())) + @staticmethod + def _get_barsize_and_duration_from_frequency(bar_freq: Frequency) -> (str, str): + barsize_lookup = dict( + [ + (Frequency.Day, "1 day"), + (Frequency.Hour, "1 hour"), + (Frequency.Minutes_15, "15 mins"), + (Frequency.Minutes_5, "5 mins"), + (Frequency.Minute, "1 min"), + (Frequency.Seconds_10, "10 secs"), + (Frequency.Second, "1 secs"), + ] ) - ib_barsize = barsize_lookup[bar_freq] - ib_duration = duration_lookup[bar_freq] - - return ib_barsize, ib_duration - - -def _avoid_pacing_violation(last_call_datetime: datetime.datetime, log=get_logger("")): - printed_warning_already = False - while _pause_for_pacing(last_call_datetime): - if not printed_warning_already: - log.debug( - "Pausing %f seconds to avoid pacing violation" - % ( - last_call_datetime - + datetime.timedelta(seconds=PACING_INTERVAL_SECONDS) - - datetime.datetime.now() - ).total_seconds() + duration_lookup = dict( + [ + (Frequency.Day, "1 Y"), + (Frequency.Hour, "1 M"), + (Frequency.Minutes_15, "1 W"), + (Frequency.Minutes_5, "1 W"), + (Frequency.Minute, "1 D"), + (Frequency.Seconds_10, "14400 S"), + (Frequency.Second, "1800 S"), + ] + ) + try: + assert bar_freq in barsize_lookup.keys() + assert bar_freq in duration_lookup.keys() + except: + raise Exception( + "Barsize %s not recognised should be one of %s" + % (str(bar_freq), str(barsize_lookup.keys())) ) - printed_warning_already = True - pass - - -def _pause_for_pacing(last_call_datetime: datetime.datetime): - time_since_last_call = datetime.datetime.now() - last_call_datetime - seconds_since_last_call = time_since_last_call.total_seconds() - should_pause = seconds_since_last_call < PACING_INTERVAL_SECONDS - return should_pause + ib_barsize = barsize_lookup[bar_freq] + ib_duration = duration_lookup[bar_freq] + + return ib_barsize, ib_duration + + def _avoid_pacing_violation(self, last_call_datetime: datetime.datetime): + printed_warning_already = False + while self._pause_for_pacing(last_call_datetime): + if not printed_warning_already: + self.log.debug( + "Pausing %f seconds to avoid pacing violation" + % ( + last_call_datetime + + datetime.timedelta(seconds=PACING_INTERVAL_SECONDS) + - datetime.datetime.now() + ).total_seconds() + ) + printed_warning_already = True + pass + + @staticmethod + def _pause_for_pacing(last_call_datetime: datetime.datetime): + time_since_last_call = datetime.datetime.now() - last_call_datetime + seconds_since_last_call = time_since_last_call.total_seconds() + should_pause = seconds_since_last_call < PACING_INTERVAL_SECONDS + + return should_pause diff --git a/syslogging/adapter.py b/syslogging/adapter.py index 0bc60ddd9f..93fb681bbe 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -8,7 +8,6 @@ class DynamicAttributeLogger(logging.LoggerAdapter): """ - # TODO futures_contract.specific_log # TODO log_with_attributes """ diff --git a/sysobjects/contracts.py b/sysobjects/contracts.py index 03b95237c4..a392195fa1 100644 --- a/sysobjects/contracts.py +++ b/sysobjects/contracts.py @@ -90,16 +90,6 @@ def __init__( self._contract_date = contract_date_object self._params = parameter_object - def specific_log(self, log): # TODO remove - new_log = log.setup( - **{ - INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, - CONTRACT_DATE_LOG_LABEL: self.date_str, - } - ) - - return new_log - def log_attributes(self): """ Returns a dict of futuresContract log attributes From 36750b1d5ef658a2c38eecdba20646a0df7e4130 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 7 Dec 2023 17:33:31 +0000 Subject: [PATCH 179/235] additional logger test for method 'clear' --- syslogging/tests/logging_tests.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index 6023b6e8a4..f0df24d9f5 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -38,6 +38,21 @@ def test_attributes_clear(self, caplog): ("Clear", logging.INFO, "{'stage': 'second'} Clearing attributes") ] + def test_attributes_reset(self, caplog): + reset = get_logger("reset") + reset.info("Updating log attributes", **{"instrument_code": "GOLD"}) + assert caplog.record_tuples[0] == ( + "reset", + logging.INFO, + "{'instrument_code': 'GOLD'} Updating log attributes", + ) + reset.info("Log attributes reset", **{"method": "clear"}) + assert caplog.record_tuples[1] == ( + "reset", + logging.INFO, + "Log attributes reset", + ) + def test_attributes_preserve(self, caplog): preserve = get_logger("Preserve", {"stage": "first"}) preserve.info( From 4c15c247130074ec5cb1947282764963a0e3a1b1 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 8 Dec 2023 14:34:44 +0000 Subject: [PATCH 180/235] refactor away uses of log_with_attributes() in the order and stack handler code --- .../order_stacks/instrument_order_stack.py | 59 +-- sysexecution/orders/base_orders.py | 10 - sysexecution/orders/broker_orders.py | 23 -- sysexecution/orders/contract_orders.py | 22 -- sysexecution/orders/instrument_orders.py | 27 -- sysexecution/stack_handler/fills.py | 7 +- sysexecution/stack_handler/roll_orders.py | 31 +- .../spawn_children_from_instrument_orders.py | 353 +++++++++--------- .../stack_handler/stackHandlerCore.py | 191 +++++----- syslogging/adapter.py | 6 - 10 files changed, 315 insertions(+), 414 deletions(-) diff --git a/sysexecution/order_stacks/instrument_order_stack.py b/sysexecution/order_stacks/instrument_order_stack.py index 029ea11349..b38d9cd364 100644 --- a/sysexecution/order_stacks/instrument_order_stack.py +++ b/sysexecution/order_stacks/instrument_order_stack.py @@ -121,8 +121,7 @@ def _put_adjusting_order_on_stack( :param new_order: :return: """ - # TODO log_with_attributes - log = new_order.log_with_attributes(self.log) + log_attrs = {**new_order.log_attributes(), "method": "temp"} existing_orders = listOfOrders( [ @@ -131,8 +130,8 @@ def _put_adjusting_order_on_stack( ] ) - adjusted_order = calculate_adjusted_order_given_existing_orders( - new_order, existing_orders, log + adjusted_order = self.calculate_adjusted_order_given_existing_orders( + new_order, existing_orders ) if adjusted_order.is_zero_trade() and not allow_zero_orders: @@ -140,43 +139,45 @@ def _put_adjusting_order_on_stack( error_msg = "Adjusted order %s is zero, zero orders not allowed" % str( adjusted_order ) - log.warning(error_msg) + self.log.warning(error_msg, **log_attrs) raise zeroOrderException(error_msg) order_id = self._put_order_on_stack_and_get_order_id(adjusted_order) return order_id + def calculate_adjusted_order_given_existing_orders( + self, new_order: instrumentOrder, existing_orders: listOfOrders + ): + log_attrs = {**new_order.log_attributes(), "method": "temp"} -def calculate_adjusted_order_given_existing_orders( # TODO passed logger instance - new_order: instrumentOrder, existing_orders: listOfOrders, log -): - desired_new_trade = new_order.trade - ( - existing_trades, - net_existing_trades_to_execute, - ) = calculate_existing_trades_and_remainder(existing_orders) + desired_new_trade = new_order.trade + ( + existing_trades, + net_existing_trades_to_execute, + ) = calculate_existing_trades_and_remainder(existing_orders) - # can change sign - residual_trade = desired_new_trade - net_existing_trades_to_execute + # can change sign + residual_trade = desired_new_trade - net_existing_trades_to_execute - adjusted_order = ( - new_order.replace_required_trade_size_only_use_for_unsubmitted_trades( - residual_trade + adjusted_order = ( + new_order.replace_required_trade_size_only_use_for_unsubmitted_trades( + residual_trade + ) ) - ) - - log.debug( - "Already have orders %s wanted %s so putting on order for %s (%s)" - % ( - str(existing_trades), - str(desired_new_trade), - str(residual_trade), - str(adjusted_order), + + self.log.debug( + "Already have orders %s wanted %s so putting on order for %s (%s)" + % ( + str(existing_trades), + str(desired_new_trade), + str(residual_trade), + str(adjusted_order), + ), + **log_attrs, ) - ) - return adjusted_order + return adjusted_order def calculate_existing_trades_and_remainder( diff --git a/sysexecution/orders/base_orders.py b/sysexecution/orders/base_orders.py index 7d13b6ea48..88e7a10e00 100644 --- a/sysexecution/orders/base_orders.py +++ b/sysexecution/orders/base_orders.py @@ -441,16 +441,6 @@ def __eq__(self, other): return same_tradeable_object and same_trade - def log_with_attributes(self, log): - """ - Returns a new log object with order attributes added - - :param log: logger - :return: log - """ - - return log - def log_attributes(self): """ Returns a dict of order log attributes diff --git a/sysexecution/orders/broker_orders.py b/sysexecution/orders/broker_orders.py index bfed60c88b..efe3b6ff24 100644 --- a/sysexecution/orders/broker_orders.py +++ b/sysexecution/orders/broker_orders.py @@ -337,29 +337,6 @@ def from_dict(instrumentOrder, order_as_dict): return order - def log_with_attributes(self, log): - """ - Returns a new log object with broker_order attributes added - - :param log: logger - :return: log - """ - broker_order = self - new_log = log.setup( - **{ - STRATEGY_NAME_LOG_LABEL: broker_order.strategy_name, - INSTRUMENT_CODE_LOG_LABEL: broker_order.instrument_code, - CONTRACT_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( - broker_order.parent, no_parent - ), - BROKER_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( - broker_order.order_id, no_order_id - ), - } - ) - - return new_log - def log_attributes(self): """ Returns a dict of broker_order log attributes diff --git a/sysexecution/orders/contract_orders.py b/sysexecution/orders/contract_orders.py index 4e2154cc7a..97d2600ee9 100644 --- a/sysexecution/orders/contract_orders.py +++ b/sysexecution/orders/contract_orders.py @@ -280,28 +280,6 @@ def panic_order(self): def inter_spread_order(self): return bool(self.order_info["inter_spread_order"]) - def log_with_attributes(self, log): - """ - Returns a new log object with contract_order attributes added - - :param log: logger - :return: log - """ - new_log = log.setup( - **{ - STRATEGY_NAME_LOG_LABEL: self.strategy_name, - INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, - CONTRACT_ORDER_ID_LOG_LABEL: if_object_matches_return_empty_string( - self.order_id, no_order_id - ), - INSTRUMENT_ORDER_ID_LABEL: if_object_matches_return_empty_string( - self.parent, no_parent - ), - } - ) - - return new_log - def log_attributes(self): """ Returns a dict of contract_order log attributes diff --git a/sysexecution/orders/instrument_orders.py b/sysexecution/orders/instrument_orders.py index 6f3539424e..69b905fdd7 100644 --- a/sysexecution/orders/instrument_orders.py +++ b/sysexecution/orders/instrument_orders.py @@ -228,33 +228,6 @@ def manual_trade(self): def roll_order(self): return bool(self.order_info["roll_order"]) - def log_with_attributes(self, log): - """ - Returns a new log object with instrument_order attributes added - - :param log: logger - :return: log - """ - new_log = log.setup( - strategy_name=self.strategy_name, - instrument_code=self.instrument_code, - instrument_order_id=if_object_matches_return_empty_string( - self.order_id, no_order_id - ), - ) - - new_log = log.setup( - **{ - STRATEGY_NAME_LOG_LABEL: self.strategy_name, - INSTRUMENT_CODE_LOG_LABEL: self.instrument_code, - INSTRUMENT_ORDER_ID_LABEL: if_object_matches_return_empty_string( - self.order_id, no_order_id - ), - } - ) - - return new_log - def log_attributes(self): """ Returns a dict of instrument_order log attributes diff --git a/sysexecution/stack_handler/fills.py b/sysexecution/stack_handler/fills.py index 0dc63739b7..d0d9ffe157 100644 --- a/sysexecution/stack_handler/fills.py +++ b/sysexecution/stack_handler/fills.py @@ -93,10 +93,11 @@ def apply_broker_order_fills_to_database( contract_order_id = broker_order.parent if contract_order_id is no_parent: - log = broker_order.log_with_attributes(self.log) - log.error( + self.log.error( "No parent for broker order %s %d" - % (str(broker_order), broker_order_id) + % (str(broker_order), broker_order_id), + **broker_order.log_attributes(), + method="temp", ) else: # pass broker fills upwards diff --git a/sysexecution/stack_handler/roll_orders.py b/sysexecution/stack_handler/roll_orders.py index f69d99a336..21e53b1a45 100644 --- a/sysexecution/stack_handler/roll_orders.py +++ b/sysexecution/stack_handler/roll_orders.py @@ -19,12 +19,7 @@ from sysproduction.data.prices import diagPrices from sysproduction.data.positions import updatePositions -from sysexecution.stack_handler.stackHandlerCore import ( - stackHandlerCore, - put_children_on_stack, - rollback_parents_and_children_and_handle_exceptions, - log_successful_adding, -) +from sysexecution.stack_handler.stackHandlerCore import stackHandlerCore from sysexecution.orders.contract_orders import contractOrder, best_order_type from sysexecution.orders.instrument_orders import zero_roll_order_type @@ -202,8 +197,7 @@ def add_instrument_and_list_of_contract_orders_to_stack( ): instrument_stack = self.instrument_stack contract_stack = self.contract_stack - # TODO log_with_attributes - parent_log = instrument_order.log_with_attributes(self.log) + log_attrs = {**instrument_order.log_attributes(), "method": "temp"} # Do as a transaction: if everything doesn't go to plan can roll back # We lock now, and @@ -214,9 +208,10 @@ def add_instrument_and_list_of_contract_orders_to_stack( ) except Exception as parent_order_error: - parent_log.warning( + self.log.warning( "Couldn't put parent order %s on instrument order stack error %s" - % (str(instrument_order), str(parent_order_error)) + % (str(instrument_order), str(parent_order_error)), + **log_attrs, ) instrument_order.unlock_order() return None @@ -238,9 +233,8 @@ def add_instrument_and_list_of_contract_orders_to_stack( # - a list of order IDS if all went well # - an empty list if error and rolled back, # - or an error something went wrong and couldn't rollback (the outer catch will try and rollback) - list_of_child_order_ids = put_children_on_stack( + list_of_child_order_ids = self.put_children_on_stack( child_stack=contract_stack, - parent_log=parent_log, list_of_child_orders=list_of_contract_orders, parent_order=instrument_order, ) @@ -266,25 +260,24 @@ def add_instrument_and_list_of_contract_orders_to_stack( # Roll back parent order and possibly children # At this point list_of_child_order_ids will either be empty (if succesful rollback) or contain child ids - rollback_parents_and_children_and_handle_exceptions( + self.rollback_parents_and_children_and_handle_exceptions( child_stack=contract_stack, parent_stack=instrument_stack, list_of_child_order_ids=list_of_child_order_ids, - parent_order_id=parent_order_id, + parent_order=instrument_order, error_from_adding_child_orders=error_from_adding_child_orders, - parent_log=parent_log, ) # phew got there - parent_log.debug( + self.log.debug( "Added parent order with ID %d %s to stack" - % (parent_order_id, str(instrument_order)) + % (parent_order_id, str(instrument_order)), + **log_attrs, ) - log_successful_adding( + self.log_successful_adding( list_of_child_orders=list_of_contract_orders, list_of_child_ids=list_of_child_order_ids, parent_order=instrument_order, - parent_log=parent_log, ) diff --git a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py index 1ca2261bb9..9d745abf6b 100644 --- a/sysexecution/stack_handler/spawn_children_from_instrument_orders.py +++ b/sysexecution/stack_handler/spawn_children_from_instrument_orders.py @@ -26,17 +26,14 @@ from sysexecution.algos.allocate_algo_to_order import ( allocate_algo_to_list_of_contract_orders, ) -from sysexecution.stack_handler.stackHandlerCore import ( - stackHandlerCore, - put_children_on_stack, - add_children_to_parent_or_rollback_children, - log_successful_adding, -) +from sysexecution.stack_handler.stackHandlerCore import stackHandlerCore from sysexecution.stack_handler.roll_orders import ( auto_update_roll_status, is_order_reducing_order, ) +contractIdAndTrade = namedtuple("contractIDAndTrade", ["contract_id", "trade"]) + class stackHandlerForSpawning(stackHandlerCore): def spawn_children_from_new_instrument_orders(self): @@ -59,8 +56,8 @@ def spawn_children_from_instrument_order_id(self, instrument_order_id: int): # log.debug("Instrument is locked, not spawning order") return None - list_of_contract_orders = spawn_children_from_instrument_order( - self.data, instrument_order + list_of_contract_orders = self.spawn_children_from_instrument_order( + instrument_order ) if len(list_of_contract_orders) > 0: @@ -84,232 +81,222 @@ def add_children_to_stack_and_child_id_to_parent( parent_order: Order, list_of_child_orders: listOfOrders, ): - # TODO log_with_attributes - parent_log = parent_order.log_with_attributes(self.log) - - list_of_child_ids = put_children_on_stack( + list_of_child_ids = self.put_children_on_stack( child_stack=child_stack, list_of_child_orders=list_of_child_orders, - parent_log=parent_log, parent_order=parent_order, ) if len(list_of_child_ids) == 0: return None - success_or_failure = add_children_to_parent_or_rollback_children( + success_or_failure = self.add_children_to_parent_or_rollback_children( child_stack=child_stack, parent_order=parent_order, - parent_log=parent_log, parent_stack=parent_stack, list_of_child_ids=list_of_child_ids, ) if success_or_failure is success: - log_successful_adding( + self.log_successful_adding( list_of_child_orders=list_of_child_orders, list_of_child_ids=list_of_child_ids, parent_order=parent_order, - parent_log=parent_log, ) - -def spawn_children_from_instrument_order( - data: dataBlob, instrument_order: instrumentOrder -): - auto_update_roll_status(data=data, instrument_code=instrument_order.instrument_code) - spawn_function = function_to_process_instrument(instrument_order.instrument_code) - list_of_contract_orders = spawn_function(data, instrument_order) - list_of_contract_orders = allocate_algo_to_list_of_contract_orders( - data, list_of_contract_orders, instrument_order - ) - - return list_of_contract_orders - - -def function_to_process_instrument(instrument_code: str) -> Callable: - """ - FIX ME in future this will handle spread orders, but for now is only for 'single instruments' - - We can get spread trades from rolls but these are not processed here - - :param instrument_code: - :return: function - """ - function_dict = dict( - single_instrument=single_instrument_child_orders, - inter_market=inter_market_instrument_child_orders, - intra_market=intra_market_instrument_child_orders, - ) - instrument_type = "single_instrument" - - required_function = function_dict[instrument_type] - - return required_function - - -def single_instrument_child_orders( - data: dataBlob, instrument_order: instrumentOrder -) -> listOfOrders: - """ - Generate child orders for a single instrument (not rolls) - - :param data: dataBlob. Required as uses roll data to determine appropriate instrument - :param instrument_order: - :return: A list of contractOrders to submit to the stack - """ - # We don't allow zero trades to be spawned - # Zero trades can enter the instrument stack, where they can potentially - # modify existing trades - if instrument_order.is_zero_trade(): - return listOfOrders([]) - - # Get required contract(s) depending on roll status - list_of_child_contract_dates_and_trades = ( - get_required_contract_trade_for_instrument(data, instrument_order) - ) - - raw_list_of_contract_orders = ( - list_of_contract_orders_from_list_of_child_date_and_trade( - instrument_order, list_of_child_contract_dates_and_trades + def spawn_children_from_instrument_order(self, instrument_order: instrumentOrder): + auto_update_roll_status( + data=self.data, instrument_code=instrument_order.instrument_code + ) + spawn_function = self.function_to_process_instrument( + instrument_order.instrument_code + ) + list_of_contract_orders = spawn_function(instrument_order) + list_of_contract_orders = allocate_algo_to_list_of_contract_orders( + self.data, list_of_contract_orders, instrument_order ) - ) - list_of_contract_orders = adjust_limit_orders_with_correct_prices( - data=data, - instrument_order=instrument_order, - list_of_contract_orders=raw_list_of_contract_orders, - ) + return list_of_contract_orders - return list_of_contract_orders + def function_to_process_instrument(self, instrument_code: str) -> Callable: + """ + FIX ME in future this will handle spread orders, but for now is only for 'single instruments' + We can get spread trades from rolls but these are not processed here -def adjust_limit_orders_with_correct_prices( - data: dataBlob, - list_of_contract_orders: listOfOrders, - instrument_order: instrumentOrder, -) -> listOfOrders: - # Get reference price for relevant contract(s) - # used for TCA - # Adjust price if reference contract is different from required contract - list_of_contract_orders_with_adjusted_reference_prices = ( - calculate_reference_prices_for_direct_child_orders( - data, instrument_order, list_of_contract_orders + :param instrument_code: + :return: function + """ + function_dict = dict( + single_instrument=self.single_instrument_child_orders, + inter_market=inter_market_instrument_child_orders, + intra_market=intra_market_instrument_child_orders, ) - ) - - # Now get the limit prices, where relevant - # Adjust limit price if limit_contract is different from required contract - list_of_contract_orders_with_adjusted_limit_prices = ( - calculate_limit_prices_for_direct_child_orders( - data, - instrument_order, - list_of_contract_orders_with_adjusted_reference_prices, + instrument_type = "single_instrument" + + required_function = function_dict[instrument_type] + + return required_function + + def single_instrument_child_orders( + self, instrument_order: instrumentOrder + ) -> listOfOrders: + """ + Generate child orders for a single instrument (not rolls) + + :param instrument_order: + :return: A list of contractOrders to submit to the stack + """ + # We don't allow zero trades to be spawned + # Zero trades can enter the instrument stack, where they can potentially + # modify existing trades + if instrument_order.is_zero_trade(): + return listOfOrders([]) + + # Get required contract(s) depending on roll status + list_of_child_contract_dates_and_trades = ( + self.get_required_contract_trade_for_instrument(instrument_order) ) - ) - - return list_of_contract_orders_with_adjusted_limit_prices + raw_list_of_contract_orders = ( + list_of_contract_orders_from_list_of_child_date_and_trade( + instrument_order, list_of_child_contract_dates_and_trades + ) + ) -contractIdAndTrade = namedtuple("contractIDAndTrade", ["contract_id", "trade"]) + list_of_contract_orders = self.adjust_limit_orders_with_correct_prices( + instrument_order=instrument_order, + list_of_contract_orders=raw_list_of_contract_orders, + ) + return list_of_contract_orders -def get_required_contract_trade_for_instrument( - data: dataBlob, instrument_order: instrumentOrder -) -> list: - """ - Return the contract to trade for a given instrument + def adjust_limit_orders_with_correct_prices( + self, + list_of_contract_orders: listOfOrders, + instrument_order: instrumentOrder, + ) -> listOfOrders: + # Get reference price for relevant contract(s) + # used for TCA + # Adjust price if reference contract is different from required contract + list_of_contract_orders_with_adjusted_reference_prices = ( + calculate_reference_prices_for_direct_child_orders( + self.data, instrument_order, list_of_contract_orders + ) + ) - Depends on roll status and trade vs position: - - roll_states = ['No_Roll', 'Passive', 'Force', 'Force_Outright', 'Roll_Adjusted'] + # Now get the limit prices, where relevant + # Adjust limit price if limit_contract is different from required contract + list_of_contract_orders_with_adjusted_limit_prices = ( + calculate_limit_prices_for_direct_child_orders( + self.data, + instrument_order, + list_of_contract_orders_with_adjusted_reference_prices, + ) + ) - If 'No Roll' then trade current contract (also 'No Open', since constraint applied upstream) - If 'Passive', and no position in current contract: trade next contract - If 'Passive', and reducing trade which leaves zero or something in current contract: trade current contract - If 'Passive', and reducing trade which is larger than current contract position: trade current and next contract - If 'Passive', and increasing trade: trade next contract - If 'Force' or 'Force Outright' or 'Roll_Adjusted' or 'Close': don't trade + return list_of_contract_orders_with_adjusted_limit_prices + def get_required_contract_trade_for_instrument( + self, instrument_order: instrumentOrder + ) -> list: + """ + Return the contract to trade for a given instrument - :param instrument_order: - :param data: dataBlog - :return: tuple: list of child orders: each is a tuple: contract str or missing_contract, trade int - """ - instrument_code = instrument_order.instrument_code - # TODO log_with_attributes - log = instrument_order.log_with_attributes(data.log) + Depends on roll status and trade vs position: + - roll_states = ['No_Roll', 'Passive', 'Force', 'Force_Outright', 'Roll_Adjusted'] - trade = instrument_order.as_single_trade_qty_or_error() - if trade is missing_order: - log.critical("Instrument order can't be a spread order") - return [] + If 'No Roll' then trade current contract (also 'No Open', since constraint applied upstream) + If 'Passive', and no position in current contract: trade next contract + If 'Passive', and reducing trade which leaves zero or something in current contract: trade current contract + If 'Passive', and reducing trade which is larger than current contract position: trade current and next contract + If 'Passive', and increasing trade: trade next contract + If 'Force' or 'Force Outright' or 'Roll_Adjusted' or 'Close': don't trade - diag_positions = diagPositions(data) - if diag_positions.is_roll_state_no_roll( - instrument_code - ) or diag_positions.is_roll_state_no_open(instrument_code): - ## trade normally - ## any increasing trades would have been weeded out earlier by strategy order handler + :param instrument_order: + :return: tuple: list of child orders: each is a tuple: contract str or missing_contract, trade int + """ + instrument_code = instrument_order.instrument_code + log_attrs = {**instrument_order.log_attributes(), "method": "temp"} - return child_order_in_priced_contract_only( - data=data, - instrument_order=instrument_order, - log=log, - ) + trade = instrument_order.as_single_trade_qty_or_error() + if trade is missing_order: + self.data.log.critical( + "Instrument order can't be a spread order", **log_attrs + ) + return [] - elif diag_positions.is_roll_state_passive(instrument_code): - # no log as function does it - return passive_roll_child_order(data=data, instrument_order=instrument_order) + diag_positions = diagPositions(self.data) - elif diag_positions.is_roll_state_close( - instrument_code - ) or diag_positions.is_roll_state_adjusted(instrument_code): - ## do nothing - return [] + if diag_positions.is_roll_state_no_roll( + instrument_code + ) or diag_positions.is_roll_state_no_open(instrument_code): + ## trade normally + ## any increasing trades would have been weeded out earlier by strategy order handler - elif diag_positions.is_double_sided_trade_roll_state(instrument_code): - order_reduces_positions = is_order_reducing_order( - data=data, order=instrument_order - ) - if order_reduces_positions: - log.debug( - "Order %s reduces position, so trading as a passive roll even though roll status is %s" - % ( - str(instrument_order), - diag_positions.get_roll_state(instrument_code), - ) + return self.child_order_in_priced_contract_only( + # data=data, + instrument_order=instrument_order, ) + + elif diag_positions.is_roll_state_passive(instrument_code): + # no log as function does it return passive_roll_child_order( - data=data, instrument_order=instrument_order + data=self.data, instrument_order=instrument_order ) - else: + + elif diag_positions.is_roll_state_close( + instrument_code + ) or diag_positions.is_roll_state_adjusted(instrument_code): ## do nothing return [] - else: - log.critical( - "Roll state %s not understood: can't generate trade for %s" - % ( - diag_positions.get_name_of_roll_state(instrument_code), - str(instrument_order), + elif diag_positions.is_double_sided_trade_roll_state(instrument_code): + order_reduces_positions = is_order_reducing_order( + data=self.data, order=instrument_order ) - ) + if order_reduces_positions: + self.data.log.debug( + "Order %s reduces position, so trading as a passive roll even though roll status is %s" + % ( + str(instrument_order), + diag_positions.get_roll_state(instrument_code), + ), + **log_attrs, + ) + return passive_roll_child_order( + data=self.data, instrument_order=instrument_order + ) + else: + ## do nothing + return [] - return [] + else: + self.data.log.critical( + "Roll state %s not understood: can't generate trade for %s" + % ( + diag_positions.get_name_of_roll_state(instrument_code), + str(instrument_order), + ), + **log_attrs, + ) + return [] -def child_order_in_priced_contract_only( # TODO passed logger instance - data: dataBlob, instrument_order: instrumentOrder, log -): - diag_contracts = dataContracts(data) - instrument_code = instrument_order.instrument_code - current_contract = diag_contracts.get_priced_contract_id(instrument_code) - trade = instrument_order.as_single_trade_qty_or_error() - log.debug( - "No roll, allocating entire order %s to current contract %s" - % (str(instrument_order), current_contract) - ) - return [contractIdAndTrade(current_contract, trade)] + def child_order_in_priced_contract_only( + self, + instrument_order: instrumentOrder, + ): + instrument_code = instrument_order.instrument_code + current_contract = self.data_contracts.get_priced_contract_id(instrument_code) + trade = instrument_order.as_single_trade_qty_or_error() + self.log.debug( + "No roll, allocating entire order %s to current contract %s" + % (str(instrument_order), current_contract), + **instrument_order.log_attributes(), + method="temp", + ) + return [contractIdAndTrade(current_contract, trade)] def passive_roll_child_order( diff --git a/sysexecution/stack_handler/stackHandlerCore.py b/sysexecution/stack_handler/stackHandlerCore.py index 90c232bc9d..59d70f4372 100644 --- a/sysexecution/stack_handler/stackHandlerCore.py +++ b/sysexecution/stack_handler/stackHandlerCore.py @@ -94,107 +94,114 @@ def update_prices(self) -> updatePrices: return update_prices + def put_children_on_stack( + self, + child_stack: orderStackData, + parent_order: Order, + list_of_child_orders: listOfOrders, + ) -> list: + log_attrs = {**parent_order.log_attributes(), "method": "temp"} -def put_children_on_stack( # TODO passed logger instance - child_stack: orderStackData, - parent_order: Order, - list_of_child_orders: listOfOrders, - parent_log, -) -> list: - try: - list_of_child_ids = child_stack.put_list_of_orders_on_stack( - list_of_child_orders - ) - except failureWithRollback as e: - parent_log.warning( - "Tried to add child orders but %s; rolled back so can try again (parent %s)" - % (str(e), str(parent_order)) - ) - return [] - - except Exception as e: - parent_log.critical( - "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!" - % str(e) - ) - return [] - - return list_of_child_ids - - -def add_children_to_parent_or_rollback_children( # TODO passed logger instance - parent_order: Order, - list_of_child_ids: list, - parent_stack: orderStackData, - child_stack: orderStackData, - parent_log, -): - try: - parent_stack.add_children_to_order_without_existing_children( - parent_order.order_id, list_of_child_ids - ) - except Exception as e: try: - child_stack.rollback_list_of_orders_on_stack(list_of_child_ids) - parent_log.warning( - "Tried to add child orders to parent but %s; rolled back so can try again (parent %s)" - % (str(e), str(parent_order)) + list_of_child_ids = child_stack.put_list_of_orders_on_stack( + list_of_child_orders ) - return failure - except: - parent_log.critical( - "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!" - % str(e) + except failureWithRollback as e: + self.log.warning( + "Tried to add child orders but %s; rolled back so can try again (parent %s)" + % (str(e), str(parent_order)), + **log_attrs, ) - return failure + return [] - return success + except Exception as e: + self.log.critical( + "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!" + % str(e), + **log_attrs, + ) + return [] + return list_of_child_ids -def log_successful_adding( # TODO passed logger instance - list_of_child_orders: listOfOrders, - list_of_child_ids: list, - parent_order: Order, - parent_log, -): - for child_order, child_id in zip(list_of_child_orders, list_of_child_ids): - # TODO log_with_attributes - child_log = child_order.log_with_attributes(parent_log) - child_log.debug( - "Put child order %s on stack with ID %d from parent order %s" - % (str(child_order), child_id, str(parent_order)) - ) + def add_children_to_parent_or_rollback_children( + self, + parent_order: Order, + list_of_child_ids: list, + parent_stack: orderStackData, + child_stack: orderStackData, + ): + log_attrs = {**parent_order.log_attributes(), "method": "temp"} + try: + parent_stack.add_children_to_order_without_existing_children( + parent_order.order_id, list_of_child_ids + ) + except Exception as e: + try: + child_stack.rollback_list_of_orders_on_stack(list_of_child_ids) + self.log.warning( + "Tried to add child orders to parent but %s; rolled back so can try again (parent %s)" + % (str(e), str(parent_order)), + **log_attrs, + ) + return failure + except: + self.log.critical( + "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!" + % str(e), + **log_attrs, + ) + return failure + + return success + + def log_successful_adding( + self, + list_of_child_orders: listOfOrders, + list_of_child_ids: list, + parent_order: Order, + ): + for child_order, child_id in zip(list_of_child_orders, list_of_child_ids): + self.log.debug( + "Put child order %s on stack with ID %d from parent order %s" + % (str(child_order), child_id, str(parent_order)), + **child_order.log_attributes(), + method="temp", + ) -def rollback_parents_and_children_and_handle_exceptions( - parent_stack: orderStackData, - child_stack: orderStackData, - parent_order_id: int, - list_of_child_order_ids: list, - parent_log, - error_from_adding_child_orders: Exception, -): - ## - try: - rollback_parents_and_children( - child_stack=child_stack, - parent_stack=parent_stack, - list_of_child_order_ids=list_of_child_order_ids, - parent_order_id=parent_order_id, - ) - parent_log.warning( - "Error %s when adding a set of parents and children but managed to rollback" - % str(error_from_adding_child_orders) - ) - return None - - except Exception as rollback_exception: - ## bloody hell even the rollback has failed, throw everything out of the pram - parent_log.critical( - "Error %s when adding a set of parents and children and couldn't rollback got error %s! Stack may be corrupted" - % (str(error_from_adding_child_orders), str(rollback_exception)) - ) - return None + def rollback_parents_and_children_and_handle_exceptions( + self, + parent_stack: orderStackData, + child_stack: orderStackData, + parent_order: Order, + list_of_child_order_ids: list, + error_from_adding_child_orders: Exception, + ): + ## + log_attrs = {**parent_order.log_attributes(), "method": "temp"} + try: + rollback_parents_and_children( + child_stack=child_stack, + parent_stack=parent_stack, + list_of_child_order_ids=list_of_child_order_ids, + parent_order_id=parent_order.order_id, + ) + self.log.warning( + "Error %s when adding a set of parents and children but managed to rollback" + % str(error_from_adding_child_orders), + **log_attrs, + ) + return None + + except Exception as rollback_exception: + ## bloody hell even the rollback has failed, throw everything out of the pram + self.log.critical( + "Error %s when adding a set of parents and children and couldn't rollback got error %s! Stack may be corrupted" + % (str(error_from_adding_child_orders), str(rollback_exception)), + **log_attrs, + ) + return None def rollback_parents_and_children( diff --git a/syslogging/adapter.py b/syslogging/adapter.py index 93fb681bbe..fbfc3874cd 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -6,12 +6,6 @@ class DynamicAttributeLogger(logging.LoggerAdapter): - - """ - # TODO log_with_attributes - - """ - def __init__(self, logger, attributes) -> None: self._check_attributes(attributes) super().__init__(logger, attributes) From b3519ff34e99a18c44865122c2567177bec11a82 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Sat, 9 Dec 2023 10:53:23 +0000 Subject: [PATCH 181/235] refactor away uses of log.setup() in fx code --- sysdata/parquet/parquet_spotfx_prices.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/sysdata/parquet/parquet_spotfx_prices.py b/sysdata/parquet/parquet_spotfx_prices.py index aa597cbdc5..616751c83d 100644 --- a/sysdata/parquet/parquet_spotfx_prices.py +++ b/sysdata/parquet/parquet_spotfx_prices.py @@ -41,17 +41,17 @@ def _get_fx_prices_without_checking(self, currency_code: str) -> fxPrices: return fx_prices def _delete_fx_prices_without_any_warning_be_careful(self, currency_code: str): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) self.parquet.delete_data_given_data_type_and_identifier( data_type=SPOTFX_COLLECTION, identifier=currency_code ) - log.debug("Deleted fX prices for %s from %s" % (currency_code, str(self))) + self.log.debug( + "Deleted fX prices for %s from %s" % (currency_code, str(self)), + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, + ) def _add_fx_prices_without_checking_for_existing_entry( self, currency_code: str, fx_price_data: fxPrices ): - log = self.log.setup(**{CURRENCY_CODE_LOG_LABEL: currency_code}) - fx_price_data_aspd = pd.DataFrame(fx_price_data) fx_price_data_aspd.columns = ["price"] fx_price_data_aspd = fx_price_data_aspd.astype(float) @@ -61,7 +61,8 @@ def _add_fx_prices_without_checking_for_existing_entry( identifier=currency_code, data_to_write=fx_price_data_aspd, ) - log.debug( + self.log.debug( "Wrote %s lines of prices for %s to %s" - % (len(fx_price_data), currency_code, str(self)) + % (len(fx_price_data), currency_code, str(self)), + **{CURRENCY_CODE_LOG_LABEL: currency_code, "method": "temp"}, ) From c5bd4515c3ca69eaa986efd09aa593851d79ce04 Mon Sep 17 00:00:00 2001 From: Mendel Friedman <77807315+meldinman@users.noreply.github.com> Date: Mon, 18 Dec 2023 21:39:39 -0500 Subject: [PATCH 182/235] Update vol.py --- sysquant/estimators/vol.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sysquant/estimators/vol.py b/sysquant/estimators/vol.py index 8c4ff6b0d0..9fe9b8642a 100644 --- a/sysquant/estimators/vol.py +++ b/sysquant/estimators/vol.py @@ -112,8 +112,10 @@ def apply_vol_floor( def backfill_vol(vol: pd.Series) -> pd.Series: # have to fill forwards first, as it's only the start we want to # backfill, eg before any value available + vol_forward_fill = vol.ffill() + vol_backfilled = vol_forward_fill.bfill() - return vol.ffill().bfill() + return vol_backfilled def mixed_vol_calc( From 1027f63c90e4a36217cd920023fdd853414d8592 Mon Sep 17 00:00:00 2001 From: yuntai Date: Wed, 20 Dec 2023 07:36:19 +0900 Subject: [PATCH 183/235] Formula fixes in risk report header --- sysproduction/reporting/instrument_risk_report.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sysproduction/reporting/instrument_risk_report.py b/sysproduction/reporting/instrument_risk_report.py index 67f3d48f0a..ad651a1784 100644 --- a/sysproduction/reporting/instrument_risk_report.py +++ b/sysproduction/reporting/instrument_risk_report.py @@ -9,8 +9,8 @@ + "A- daily_price_stdev: Standard deviation, price points, per day\n" + "B- annual_price_stdev: Standard deviation, price points, per year =A*16 \n" + "C- price: Price \n" - + "D- daily_perc_stdev: Standard deviation, percentage (1=1%), per day =A*C \n" - + "E- annual_perc_stdev: Standard deviation, percentage (1=1%), per year = B*C = D*16 \n" + + "D- daily_perc_stdev: Standard deviation, percentage (1=1%), per day =A/C \n" + + "E- annual_perc_stdev: Standard deviation, percentage (1=1%), per year = B/C = D*16 \n" + "F- point_size_base: Futures multiplier in base (account) currency \n" + "G- contract_exposure: Notional value of one contract = F*C \n" + "H- annual_risk_per_contract: Standard deviation, base currency, per year = B * F = E * G" From edbb6c1be079409e0a78c3dfb61274e146a1a4de Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 20 Dec 2023 13:42:42 +0000 Subject: [PATCH 184/235] fix --- syscore/pandas/pdutils.py | 3 ++- sysdata/sim/futures_sim_data.py | 2 ++ sysdata/sim/futures_sim_data_with_data_blob.py | 2 +- systems/rawdata.py | 14 ++++++-------- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 24f06f81fe..272af80866 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -15,6 +15,7 @@ FALLBACK_DATE_FORMAT_FOR_CSV = "%Y-%m-%d" + def rolling_pairwise_correlation( x: pd.DataFrame, periods: int, min_periods: int = 3 ) -> pd.Series: @@ -113,7 +114,7 @@ def pd_readcsv( df=df, date_index_name=date_index_name, date_format=date_format ) except: - df =add_datetime_index( + df = add_datetime_index( df=df, date_index_name=date_index_name, date_format=fallback_date_format ) diff --git a/sysdata/sim/futures_sim_data.py b/sysdata/sim/futures_sim_data.py index a17a6c3444..d46e069a39 100644 --- a/sysdata/sim/futures_sim_data.py +++ b/sysdata/sim/futures_sim_data.py @@ -75,6 +75,8 @@ def get_raw_price_from_start_date( :return: price """ price = self.get_backadjusted_futures_price(instrument_code) + if len(price)==0: + raise Exception("Instrument code %s has no data!" % instrument_code) return price[start_date:] diff --git a/sysdata/sim/futures_sim_data_with_data_blob.py b/sysdata/sim/futures_sim_data_with_data_blob.py index dbff988846..0e0f5f1193 100644 --- a/sysdata/sim/futures_sim_data_with_data_blob.py +++ b/sysdata/sim/futures_sim_data_with_data_blob.py @@ -71,7 +71,7 @@ def get_multiple_prices_from_start_date( self, instrument_code: str, start_date ) -> futuresMultiplePrices: data = self.db_futures_multiple_prices_data.get_multiple_prices(instrument_code) - if len(data)==0: + if len(data) == 0: raise missingData( "Data for %s not found! Remove from instrument list, or add to config.ignore_instruments" % instrument_code diff --git a/systems/rawdata.py b/systems/rawdata.py index 27afc40c0f..feebebaeee 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -345,7 +345,6 @@ def _aggregate_daily_vol_normalised_returns_for_list_of_instruments( def _daily_vol_normalised_price_for_list_of_instruments( self, list_of_instruments: list ) -> pd.Series: - norm_returns = ( self._aggregate_daily_vol_normalised_returns_for_list_of_instruments( list_of_instruments @@ -385,25 +384,24 @@ def daily_vol_normalised_price_for_asset_class_with_redundant_instrument_code( :return: pd.Series """ - return self._by_asset_class_daily_vol_normalised_price_for_asset_class(asset_class) + return self._by_asset_class_daily_vol_normalised_price_for_asset_class( + asset_class + ) @diagnostic() def system_with_redundant_instrument_code_passed( - self,instrument_code: str, asset_class: str + self, instrument_code: str, asset_class: str ): ## allows ultimate flexibility when creating trading rules but be careful! return self.parent @diagnostic() - def instrument_code( - self,instrument_code: str - ) -> pd.Series: + def instrument_code(self, instrument_code: str) -> pd.Series: ## allows ultimate flexibility when creating trading rules return instrument_code - @output() def normalised_price_for_asset_class(self, instrument_code: str) -> pd.Series: """ @@ -733,4 +731,4 @@ def instrument_list(self) -> list: if __name__ == "__main__": import doctest - doctest.testmod() \ No newline at end of file + doctest.testmod() From f43d7315b9ec552d2b4cdf571e74ee6835452ce8 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Fri, 22 Dec 2023 13:45:21 +0000 Subject: [PATCH 185/235] black --- sysdata/sim/futures_sim_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/sim/futures_sim_data.py b/sysdata/sim/futures_sim_data.py index d46e069a39..b2eebdf8da 100644 --- a/sysdata/sim/futures_sim_data.py +++ b/sysdata/sim/futures_sim_data.py @@ -75,7 +75,7 @@ def get_raw_price_from_start_date( :return: price """ price = self.get_backadjusted_futures_price(instrument_code) - if len(price)==0: + if len(price) == 0: raise Exception("Instrument code %s has no data!" % instrument_code) return price[start_date:] From 8078f17a2d1a3e5a1923e352968311efdcd5b68c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 10 Jan 2024 12:30:46 +0000 Subject: [PATCH 186/235] repointing data refs to rawdata refs to make it easier to override --- systems/accounts/account_inputs.py | 6 +++--- systems/rawdata.py | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/systems/accounts/account_inputs.py b/systems/accounts/account_inputs.py index 6ebf4b5193..17104bf629 100644 --- a/systems/accounts/account_inputs.py +++ b/systems/accounts/account_inputs.py @@ -58,10 +58,10 @@ def instrument_prices_for_position_or_forecast_infer_frequency( return instrument_prices def get_daily_prices(self, instrument_code: str) -> pd.Series: - return self.parent.data.daily_prices(instrument_code) + return self.parent.rawdata.get_daily_prices(instrument_code) def get_hourly_prices(self, instrument_code: str) -> pd.Series: - return self.parent.data.hourly_prices(instrument_code) + return self.parent.rawdata.get_hourly_prices(instrument_code) def get_capped_forecast( self, instrument_code: str, rule_variation_name: str @@ -108,7 +108,7 @@ def forecast_cap(self) -> float: return self.config.forecast_cap def get_raw_cost_data(self, instrument_code: str) -> instrumentCosts: - return self.parent.data.get_raw_cost_data(instrument_code) + return self.parent.rawdata.get_raw_cost_data(instrument_code) def get_rolls_per_year(self, instrument_code: str) -> int: rolls_per_year = self.parent.rawdata.rolls_per_year(instrument_code) diff --git a/systems/rawdata.py b/systems/rawdata.py index feebebaeee..572f2a2e6b 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -42,6 +42,9 @@ def data_stage(self) -> futuresSimData: def config(self) -> Config: return self.parent.config + def get_raw_cost_data(self, instrument_code: str): + return self.data_stage.get_raw_cost_data(instrument_code) + @input def get_daily_prices(self, instrument_code) -> pd.Series: """ From 949a58c6a40515accc05fa53578b303720491fa1 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 10 Jan 2024 12:52:16 +0000 Subject: [PATCH 187/235] repointing data refs to rawdata refs to make it easier to override --- systems/accounts/account_inputs.py | 2 +- systems/rawdata.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/systems/accounts/account_inputs.py b/systems/accounts/account_inputs.py index 17104bf629..7d7144deea 100644 --- a/systems/accounts/account_inputs.py +++ b/systems/accounts/account_inputs.py @@ -116,7 +116,7 @@ def get_rolls_per_year(self, instrument_code: str) -> int: return rolls_per_year def get_value_of_block_price_move(self, instrument_code: str) -> float: - return self.parent.data.get_value_of_block_price_move(instrument_code) + return self.parent.rawdata.get_value_of_block_price_move(instrument_code) def get_fx_rate(self, instrument_code: str) -> pd.Series: return self.parent.positionSize.get_fx_rate(instrument_code) diff --git a/systems/rawdata.py b/systems/rawdata.py index 572f2a2e6b..5970ca6d62 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -45,6 +45,10 @@ def config(self) -> Config: def get_raw_cost_data(self, instrument_code: str): return self.data_stage.get_raw_cost_data(instrument_code) + def get_value_of_block_price_move(self, instrument_code: str) -> float: + return self.data_stage.get_value_of_block_price_move(instrument_code) + + @input def get_daily_prices(self, instrument_code) -> pd.Series: """ From 40dcdedc50e3c4e0e4f48e8b352bacb793bb736b Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 10 Jan 2024 13:02:02 +0000 Subject: [PATCH 188/235] repointing data refs to rawdata refs to make it easier to override --- systems/positionsizing.py | 4 ++-- systems/rawdata.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/systems/positionsizing.py b/systems/positionsizing.py index b7f45548f5..c4e1509738 100644 --- a/systems/positionsizing.py +++ b/systems/positionsizing.py @@ -318,7 +318,7 @@ def get_block_value(self, instrument_code: str) -> pd.Series: """ underlying_price = self.get_underlying_price(instrument_code) - value_of_price_move = self.parent.data.get_value_of_block_price_move( + value_of_price_move = self.rawdata_stage.get_value_of_block_price_move( instrument_code ) @@ -535,7 +535,7 @@ def get_fx_rate(self, instrument_code: str) -> pd.Series: """ base_currency = self.get_base_currency() - fx_rate = self.data.get_fx_for_instrument(instrument_code, base_currency) + fx_rate = self.rawdata_stage.get_fx_for_instrument(instrument_code, base_currency) return fx_rate diff --git a/systems/rawdata.py b/systems/rawdata.py index 5970ca6d62..1a5f2ea89b 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -48,6 +48,10 @@ def get_raw_cost_data(self, instrument_code: str): def get_value_of_block_price_move(self, instrument_code: str) -> float: return self.data_stage.get_value_of_block_price_move(instrument_code) + def get_fx_for_instrument( + self, instrument_code: str, base_currency: str + ): + return self.data_stage.get_fx_for_instrument(instrument_code=instrument_code, base_currency=base_currency) @input def get_daily_prices(self, instrument_code) -> pd.Series: From 101145948b757076460ed6c3bbecb6cf2b09eed7 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Wed, 10 Jan 2024 13:08:18 +0000 Subject: [PATCH 189/235] black --- systems/positionsizing.py | 4 +++- systems/rawdata.py | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/systems/positionsizing.py b/systems/positionsizing.py index c4e1509738..7bd858622d 100644 --- a/systems/positionsizing.py +++ b/systems/positionsizing.py @@ -535,7 +535,9 @@ def get_fx_rate(self, instrument_code: str) -> pd.Series: """ base_currency = self.get_base_currency() - fx_rate = self.rawdata_stage.get_fx_for_instrument(instrument_code, base_currency) + fx_rate = self.rawdata_stage.get_fx_for_instrument( + instrument_code, base_currency + ) return fx_rate diff --git a/systems/rawdata.py b/systems/rawdata.py index 1a5f2ea89b..bb7992dc2a 100644 --- a/systems/rawdata.py +++ b/systems/rawdata.py @@ -48,10 +48,10 @@ def get_raw_cost_data(self, instrument_code: str): def get_value_of_block_price_move(self, instrument_code: str) -> float: return self.data_stage.get_value_of_block_price_move(instrument_code) - def get_fx_for_instrument( - self, instrument_code: str, base_currency: str - ): - return self.data_stage.get_fx_for_instrument(instrument_code=instrument_code, base_currency=base_currency) + def get_fx_for_instrument(self, instrument_code: str, base_currency: str): + return self.data_stage.get_fx_for_instrument( + instrument_code=instrument_code, base_currency=base_currency + ) @input def get_daily_prices(self, instrument_code) -> pd.Series: From 14acfb811516979439ef537e76b11b74e9759cc1 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 11:51:29 +0000 Subject: [PATCH 190/235] remove references to log.setup() from comments and messages --- sysdata/data_blob.py | 36 ++++++++++++++++-------------------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index d54f348fb8..2bc1c01594 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -34,11 +34,9 @@ def __init__( .... sets up the following equivalencies: - data.broker_contract_price = ibFuturesContractPriceData(ib_conn, log=log.setup(component="IB-price-data")) - data.db_futures_contract_price = arcticFuturesContractPriceData(mongo_db=mongo_db, - log=log.setup(component="arcticFuturesContractPriceData")) - data.db_futures_contract = mongoFuturesContractData(mongo_db=mongo_db, - log = log.setup(component="mongoFuturesContractData")) + data.broker_contract_price = ibFuturesContractPriceData(ib_conn) + data.db_futures_contract_price = arcticFuturesContractPriceData(mongo_db=mongo_db) + data.db_futures_contract = mongoFuturesContractData(mongo_db=mongo_db) This abstracts the precise data source @@ -50,11 +48,9 @@ def __init__( .... sets up the following equivalencies. This is useful if you are copying from one source to another - data.ib_contract_price = ibFuturesContractPriceData(ib_conn, log=log.setup(component="IB-price-data")) - data.arctic_futures_contract_price = arcticFuturesContractPriceData(mongo_db=mongo_db, - log=log.setup(component="arcticFuturesContractPriceData")) - data.mongo_futures_contract = mongoFuturesContractData(mongo_db=mongo_db, - log = log.setup(component="mongoFuturesContractData")) + data.ib_contract_price = ibFuturesContractPriceData(ib_conn) + data.arctic_futures_contract_price = arcticFuturesContractPriceData(mongo_db=mongo_db) + data.mongo_futures_contract = mongoFuturesContractData(mongo_db=mongo_db) @@ -133,9 +129,9 @@ def _add_ib_class(self, class_object): except Exception as e: class_name = get_class_name(class_object) msg = ( - "Error %s couldn't evaluate %s(self.ib_conn, self, log = self.log.setup(component = %s)) This might be because (a) IB gateway not running, or (b) import is missing\ + "Error %s couldn't evaluate %s(self.ib_conn, self) This might be because (a) IB gateway not running, or (b) import is missing\ or (c) arguments don't follow pattern" - % (str(e), class_name, class_name) + % (str(e), class_name) ) self._raise_and_log_error(msg) @@ -148,10 +144,10 @@ def _add_mongo_class(self, class_object): except Exception as e: class_name = get_class_name(class_object) msg = ( - "Error '%s' couldn't evaluate %s(mongo_db=self.mongo_db, log = self.log.setup(component = %s)) \ + "Error '%s' couldn't evaluate %s(mongo_db=self.mongo_db) \ This might be because import is missing\ or arguments don't follow pattern" - % (str(e), class_name, class_name) + % (str(e), class_name) ) self._raise_and_log_error(msg) @@ -164,10 +160,10 @@ def _add_arctic_class(self, class_object): except Exception as e: class_name = get_class_name(class_object) msg = ( - "Error %s couldn't evaluate %s(mongo_db=self.mongo_db, log = self.log.setup(component = %s)) \ + "Error %s couldn't evaluate %s(mongo_db=self.mongo_db) \ This might be because import is missing\ or arguments don't follow pattern" - % (str(e), class_name, class_name) + % (str(e), class_name) ) self._raise_and_log_error(msg) @@ -182,10 +178,10 @@ def _add_parquet_class(self, class_object): except Exception as e: class_name = get_class_name(class_object) msg = ( - "Error '%s' couldn't evaluate %s(parquet_access = self.parquet_access, log = self.log.setup(component = %s)) \ + "Error '%s' couldn't evaluate %s(parquet_access = self.parquet_access) \ This might be because import is missing\ or arguments don't follow pattern or parquet_store is undefined" - % (str(e), class_name, class_name) + % (str(e), class_name) ) self._raise_and_log_error(msg) @@ -200,10 +196,10 @@ def _add_csv_class(self, class_object): except Exception as e: class_name = get_class_name(class_object) msg = ( - "Error %s couldn't evaluate %s(datapath = datapath, log = self.log.setup(component = %s)) \ + "Error %s couldn't evaluate %s(datapath = datapath) \ This might be because import is missing\ or arguments don't follow pattern" - % (str(e), class_name, class_name) + % (str(e), class_name) ) self._raise_and_log_error(msg) From 537d8be0b1615c9b29fcad5fedfd56796e7bc439 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 11:51:43 +0000 Subject: [PATCH 191/235] remove references to log.setup() from POC --- examples/logging/poc.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/examples/logging/poc.py b/examples/logging/poc.py index 7ded0c1e2d..c3a335e587 100644 --- a/examples/logging/poc.py +++ b/examples/logging/poc.py @@ -83,13 +83,6 @@ level.info("does not print") level.warning("does print") - -# alias 'setup' -setup = get_logger("Setup", {"stage": "one", "type": "first"}) -setup.info("stage one, type first") -setup = setup.setup(stage="two") -setup.info("stage two, no type") - # replacing log.label() - we want to update the log attributes permanently - same as # overwrite label = get_logger("label", {"stage": "whatever"}) From 45b957c5c51fc24986ba74b9b7bf38ec70434b34 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 11:52:07 +0000 Subject: [PATCH 192/235] removing log.setup() tests --- syslogging/tests/logging_tests.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/syslogging/tests/logging_tests.py b/syslogging/tests/logging_tests.py index f0df24d9f5..5242c7ef18 100644 --- a/syslogging/tests/logging_tests.py +++ b/syslogging/tests/logging_tests.py @@ -156,18 +156,3 @@ def test_fx_log_attributes(self, caplog): logging.INFO, "no contract attributes", ) - - def test_setup(self): - logger = get_logger("my_type", {"stage": "bar"}) - logger = logger.setup(stage="left") - assert logger.name == "my_type" - assert logger.extra["stage"] == "left" - - no_attrs = get_logger("no_attrs") - no_attrs = no_attrs.setup(instrument_code="XYZ") - assert no_attrs.extra["instrument_code"] == "XYZ" - - def test_setup_bad(self): - logger = get_logger("my_type", {"stage": "bar"}) - with pytest.raises(Exception): - logger.setup(foo="bar") From 39eb0466c013eee1567f9e9482583a26d05d2ccd Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 11:52:32 +0000 Subject: [PATCH 193/235] removing setup() from adapter --- syslogging/adapter.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/syslogging/adapter.py b/syslogging/adapter.py index fbfc3874cd..27e2f458de 100644 --- a/syslogging/adapter.py +++ b/syslogging/adapter.py @@ -1,6 +1,4 @@ -import logging import logging.config -import warnings from syslogdiag.pst_logger import * @@ -56,18 +54,6 @@ def _merge_attributes(self, method, attributes): return merged - def setup(self, **kwargs): - # Create a copy of me with different attributes - warnings.warn( - "The 'setup' function is deprecated; instead, " - "update attributes with method=clear/preserve/overwrite/temp", - DeprecationWarning, - 2, - ) - attributes = {**kwargs} - self._check_attributes(attributes) - return DynamicAttributeLogger(logging.getLogger(self.name), attributes) - def _check_attributes(self, attributes: dict): if attributes: bad_attributes = get_list_of_disallowed_attributes(attributes) From 123f4388edd269000a77e6178c64c7f0dc724bbb Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 11:52:53 +0000 Subject: [PATCH 194/235] removing reference to pst_logger --- sysproduction/update_multiple_adjusted_prices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/update_multiple_adjusted_prices.py b/sysproduction/update_multiple_adjusted_prices.py index d053f1dcc2..61edd2ecb2 100644 --- a/sysproduction/update_multiple_adjusted_prices.py +++ b/sysproduction/update_multiple_adjusted_prices.py @@ -106,7 +106,7 @@ def update_multiple_adjusted_prices_for_instrument( :param instrument_code: :param data: dataBlob - :param log: pst_logger + :param log: logger :return: None """ From 26515c3c84324c94af8e645837b7e98443d71432 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 12:21:30 +0000 Subject: [PATCH 195/235] logging docs updated --- docs/production.md | 117 +++++++++++++++------------------------------ 1 file changed, 38 insertions(+), 79 deletions(-) diff --git a/docs/production.md b/docs/production.md index 12c7e28197..1537d924bd 100644 --- a/docs/production.md +++ b/docs/production.md @@ -659,7 +659,7 @@ Note: the configuration variable echo_extension will need changing in `private_c ### Logging -pysystemtrade uses the [Python logging module](https://docs.python.org/3.8/library/logging.html). See the [user guide for more detail](/docs/backtesting.md#logging) about logging in sim. Python logging is powerful and flexible, and log messages can be [formatted as you like, and sent virtually anywhere](https://docs.python.org/3.8/howto/logging.html#logging-advanced-tutorial) by providing your own config. But this section describes the default provided production setup. +pysystemtrade uses the [Python logging module](https://docs.python.org/3.10/library/logging.html). See the [user guide for more detail](/docs/backtesting.md#logging) about logging in sim. Python logging is powerful and flexible, and log messages can be [formatted as you like, and sent virtually anywhere](https://docs.python.org/3.8/howto/logging.html#logging-advanced-tutorial) by providing your own config. But this section describes the default provided production setup. In production, the requirements are more complex than in sim. As well as the context relevant attributes (that we have with sim), we also need - ability to log to the same file from different processes @@ -752,88 +752,47 @@ There is a special SMTP handler, for CRITICAL log messages only. This handler us #### Adding logging to your code -Here is an example of logging code (needs to adjusted for new style logging): +See the [logging docs](https://docs.python.org/3.10/library/logging.html) for usage examples. There are four ways to manage context attributes: +* *overwrite* - passed attributes are merged with any existing, overwriting duplicates (the default) +* *preserve* - passed attributes are merged with any existing, preserving duplicates +* *clear* - existing attributes are cleared, passed ones added +* *temp* - passed attributes will only be used for one invocation -```python -from syslogging.logger import * - - -def top_level_function(): - """ - This is a function that's called as the top level of a process - """ - - # logger setup - log = get_logger("top-level-function") - - # note use of log.setup when passing log to other components, this creates a copy of the existing log with an additional attribute set - TODO transition to sysloggging - conn = connectionIB(client=100, log=log.setup(component="IB-connection")) - - # - TODO transition to sysloggging - ibfxpricedata = ibFxPricesData(conn, log=log.setup(component="ibFxPricesData")) - - # - TODO transition to sysloggging - arcticfxdata = arcticFxPricesData(log=log.setup(component="arcticFxPricesData")) - - list_of_codes_all = ibfxpricedata.get_list_of_fxcodes() # codes must be in .csv file /sysbrokers/IB/ibConfigSpotFx.csv - log.debug("FX Codes: %s" % str(list_of_codes_all)) - for fx_code in list_of_codes_all: - - # Using log.label permanently adds the labelled attribute (although in this case it will be replaced on each iteration of the loop - TODO transition to sysloggging - log.label(currency_code=fx_code) - new_fx_prices = ibfxpricedata.get_fx_prices(fx_code) - - if len(new_fx_prices) == 0: - log.error("Error trying to get data for %s" % fx_code) - continue -``` - -#### Refactoring logging - -There is an ongoing project (June 2023) to migrate [legacy logging](/syslogdiag/pst_logger.py) to the built-in Python logging module. Currently, lots of methods are marked as deprecated - they will be refactored away in time. But if you are working on some code and want to make a change now: -- `log.msg()` - > `log.debug()` -- `log.terse()` - > `log.info()` -- `log.warn()` - > `log.warning()` - -For other methods, like `label()`, `setup()`, each should be taken on a case by case basis. Under the hood, a call to `get_logger()` creates an instance of `DynamicAttributeLogger` which has an instance of a [Python logger](https://docs.python.org/3.8/library/logging.html#logging.Logger). From the docs: - -> Multiple calls to getLogger() with the same name will always return a reference to the same Logger object. - -So our outer object handles the context attributes, and the inner `logging.Logger` object does the rest. We cannot copy logger instances as we did with the legacy system. Instead, we can manage the attributes with four ways to merge: *overwrite* (the default), *preserve*, *clear*, and *temp*. +#### Examples ```python # merging attributes: method 'overwrite' (default if no method supplied) - overwrite = get_logger("Overwrite", {"type": "first"}) - overwrite.info("overwrite, type 'first'") - overwrite.info( - "overwrite, type 'second', stage 'one'", - method="overwrite", - type="second", - stage="one", - ) - - # merging attributes: method 'preserve' - preserve = get_logger("Preserve", {"type": "first"}) - preserve.info("preserve, type 'first'") - preserve.info( - "preserve, type 'first', stage 'one'", method="preserve", type="second", stage="one" - ) - - # merging attributes: method 'clear' - clear = get_logger("Clear", {"type": "first", "stage": "one"}) - clear.info("clear, type 'first', stage 'one'") - clear.info("clear, type 'second', no stage", method="clear", type="second") - clear.info("clear, no attributes", method="clear") - - # merging attributes: method 'temp' - temp = get_logger("temp", {"type": "first"}) - temp.info("type should be 'first'") - temp.info( - "type should be 'second' temporarily", - method="temp", - type="second", - ) - temp.info("type should be back to 'first'") +overwrite = get_logger("Overwrite", {"type": "first"}) +overwrite.info("overwrite, type 'first'") +overwrite.info( + "overwrite, type 'second', stage 'one'", + method="overwrite", + type="second", + stage="one", +) + +# merging attributes: method 'preserve' +preserve = get_logger("Preserve", {"type": "first"}) +preserve.info("preserve, type 'first'") +preserve.info( + "preserve, type 'first', stage 'one'", method="preserve", type="second", stage="one" +) + +# merging attributes: method 'clear' +clear = get_logger("Clear", {"type": "first", "stage": "one"}) +clear.info("clear, type 'first', stage 'one'") +clear.info("clear, type 'second', no stage", method="clear", type="second") +clear.info("clear, no attributes", method="clear") + +# merging attributes: method 'temp' +temp = get_logger("temp", {"type": "first"}) +temp.info("type should be 'first'") +temp.info( + "type should be 'second' temporarily", + method="temp", + type="second", +) +temp.info("type should be back to 'first'") ``` #### Cleaning old logs From 846057a97146f343607822b96673d5a36635deca Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 12:54:38 +0000 Subject: [PATCH 196/235] link to python 3.10 logging docs --- docs/backtesting.md | 4 ++-- docs/production.md | 2 +- syslogging/handlers.py | 2 +- syslogging/server.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/backtesting.md b/docs/backtesting.md index 33ea650053..81d7a5578b 100644 --- a/docs/backtesting.md +++ b/docs/backtesting.md @@ -3851,7 +3851,7 @@ These functions are used internally whenever a file name is passed in, so feel f ### Basic logging -pysystemtrade uses the [Python logging module](https://docs.python.org/3.8/library/logging.html). The system, data, config and each stage object all have a .log attribute, to allow the system to report to the user; as do the functions provided to estimate correlations and do optimisations. +pysystemtrade uses the [Python logging module](https://docs.python.org/3.10/library/logging.html). The system, data, config and each stage object all have a .log attribute, to allow the system to report to the user; as do the functions provided to estimate correlations and do optimisations. By default, log messages will print out to the console (`std.out`) at level DEBUG. This what you get in sim. This is configured by function `_configure_sim()` in `syslogging.logger.py`. @@ -3888,7 +3888,7 @@ I strongly encourage the use of logging, rather than printing, since printing on ### Advanced logging -In my experience wading through long log files is a rather time-consuming experience. On the other hand it's often more useful to use a logging approach to monitor system behaviour than to try and create quantitative diagnostics. For this reason I'm a big fan of logging with *attributes*. This project uses a custom version of [logging.LoggerAdapter](https://docs.python.org/3.8/library/logging.html#loggeradapter-objects) for that purpose: +In my experience wading through long log files is a rather time-consuming experience. On the other hand it's often more useful to use a logging approach to monitor system behaviour than to try and create quantitative diagnostics. For this reason I'm a big fan of logging with *attributes*. This project uses a custom version of [logging.LoggerAdapter](https://docs.python.org/3.10/library/logging.html#loggeradapter-objects) for that purpose: ```python from syslogging.logger import * diff --git a/docs/production.md b/docs/production.md index 1537d924bd..78f1ddff99 100644 --- a/docs/production.md +++ b/docs/production.md @@ -659,7 +659,7 @@ Note: the configuration variable echo_extension will need changing in `private_c ### Logging -pysystemtrade uses the [Python logging module](https://docs.python.org/3.10/library/logging.html). See the [user guide for more detail](/docs/backtesting.md#logging) about logging in sim. Python logging is powerful and flexible, and log messages can be [formatted as you like, and sent virtually anywhere](https://docs.python.org/3.8/howto/logging.html#logging-advanced-tutorial) by providing your own config. But this section describes the default provided production setup. +pysystemtrade uses the [Python logging module](https://docs.python.org/3.10/library/logging.html). See the [user guide for more detail](/docs/backtesting.md#logging) about logging in sim. Python logging is powerful and flexible, and log messages can be [formatted as you like, and sent virtually anywhere](https://docs.python.org/3.10/howto/logging.html#logging-advanced-tutorial) by providing your own config. But this section describes the default provided production setup. In production, the requirements are more complex than in sim. As well as the context relevant attributes (that we have with sim), we also need - ability to log to the same file from different processes diff --git a/syslogging/handlers.py b/syslogging/handlers.py index 4d992210ba..f973614d75 100644 --- a/syslogging/handlers.py +++ b/syslogging/handlers.py @@ -50,7 +50,7 @@ class LogRecordStreamHandler(socketserver.StreamRequestHandler): This basically logs the record using whatever logging policy is configured locally. - https://docs.python.org/3.8/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network + https://docs.python.org/3.10/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network """ def handle(self): diff --git a/syslogging/server.py b/syslogging/server.py index 49bb55cb36..4080f3205d 100644 --- a/syslogging/server.py +++ b/syslogging/server.py @@ -14,7 +14,7 @@ class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): """ Simple TCP socket-based logging receiver - https://docs.python.org/3.8/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network + https://docs.python.org/3.10/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network """ allow_reuse_address = True From 341bae37faed75e5615bcb94c30da863e2dbe7ec Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 11 Jan 2024 12:54:56 +0000 Subject: [PATCH 197/235] remove old POC logging doc --- examples/logging/logging_help.md | 138 ------------------------------- 1 file changed, 138 deletions(-) delete mode 100644 examples/logging/logging_help.md diff --git a/examples/logging/logging_help.md b/examples/logging/logging_help.md deleted file mode 100644 index 8e999ccad1..0000000000 --- a/examples/logging/logging_help.md +++ /dev/null @@ -1,138 +0,0 @@ -# Notes for switch to Python logging - -(To be merged into docs once changeover done) - -## Usage - -``` -from syslogging.logger import * - -# set up a logger with a name -log = get_logger("my_logger") - -# create a log message with a logging level -log.debug("debug message") -log.info("info message") -log.warning("warning message") -log.error("error message") -log.critical("critical message") - -# parameterise the message -log.info("Hello %s", "world") -log.info("Goodbye %s %s", "cruel", "world") - -# setting attributes on initialisation -log = get_logger("attributes", {"stage": "first"}) - -# setting attributes on message creation -log.info("logging call attributes", instrument_code="GOLD") -``` - -See [the POC](example/logging/poc.py) for more usage examples, and the [Python -docs](https://docs.python.org/3.8/library/logging.html) for more general info. - -## Configuration - -By default, log messages will print out to the console (`std.out`) at level DEBUG. This what you get in sim. This is configured by function `_configure_sim()` in `syslogging.logger.py`. - -If you want to change the level, or the format of the messages, then create an environment variable that points to an alternative YAML logging configuration. Something like this for Bash - -``` -PYSYS_LOGGING_CONFIG=/home/path/to/your/logging_config.yaml -``` - -It could be a file within the project, so will accept the relative dotted path format. There's an example YAML file that replicates the default sim configuration - -``` -PYSYS_LOGGING_CONFIG=syslogging.logging_sim.yaml -``` - -## Production - -In production, the requirements are more complex. As well as the context relevant attributes (that we have with sim), we also need -- ability to log to the same file from different processes -- output to console for echo files -- critical level messages to trigger an email - -Configure the default production setup with: - -``` -PYSYS_LOGGING_CONFIG=syslogging.logging_prod.yaml -``` - -At the client side, (pysystemtrade) there are three handlers: socket, console, and email. There is a server (separate process) for the socket handler. More details on each below - -### socket - -Python doesn't support more than one process writing to a file at the same time. So, on the client side, log messages are serialised and sent over the wire. A simple TCP socket server receives, de-serialises, and writes them to disk. The socket server needs to be running first. The simplest way to start it: - -``` -python -u $PYSYS_CODE/syslogging/server.py -``` - -But that would write logs to the current working directory. Probably not what you want. Instead, pass the log file path - -``` -python -u $PYSYS_CODE/syslogging/server.py --file /home/path/to/your/pysystemtrade.log -``` - -By default, the server accepts connections on port 6020. But if you want to use another - -``` -python -u $PYSYS_CODE/syslogging/server.py --port 6021 --file /home/path/to/your/pysystemtrade.log -``` - -The socket server also handles rotating the log files daily; the default setup rotates creates a new log at midnight each day, keeping the last 5 days' files. So after a week, the log directory file listing would look something like - -``` --rw-r--r-- 1 user group 19944754 May 4 15:42 pysystemtrade.log --rw-r--r-- 1 user group 19030250 Apr 24 22:16 pysystemtrade.log.2023-04-24 --rw-r--r-- 1 user group 6178163 Apr 25 22:16 pysystemtrade.log.2023-04-25 --rw-r--r-- 1 user group 9465225 Apr 26 22:16 pysystemtrade.log.2023-04-26 --rw-r--r-- 1 user group 4593885 Apr 27 16:53 pysystemtrade.log.2023-04-27 --rw-r--r-- 1 user group 4414970 May 3 22:16 pysystemtrade.log.2023-05-03 -``` - -The server needs to be running all the time. It needs to run in the background, start up on reboot, restart automatically in case of failure, etc. So a better way to do it would be to make it a service - -#### socket server as a service - -There is an example Linux systemd service file provided, see `examples/logging/logging_server.service`. And a setup guide [here](https://tecadmin.net/setup-autorun-python-script-using-systemd/). Basic setup for Debian/Ubuntu is: - -- create a new file at `/etc/systemd/system/logging_server.service` -- paste the example file into it -- update the paths in `ExecStart`. If using a virtual environment, make sure to use the correct path to Python -- update the `User` and `Group` values, so the log file is not owned by root -- update the path in `Environment`, if using a custom private config directory -- run the following commands to start/stop/restart etc - -``` -# reload daemon -sudo systemctl daemon-reload - -# enable service (restart on boot) -sudo systemctl enable log_server.service - -# view service status -sudo systemctl status log_server.service - -# start service -sudo systemctl start log_server.service - -# stop service -sudo systemctl stop log_server.service - -# restart -sudo systemctl restart log_server.service - -# view service log (not pysystemtrade log) -sudo journalctl -e -u log_server.service -``` - -### console - -All log messages also get sent to console, as with sim. The supplied `crontab` entries would therefore also pipe their output to the echo files - -### email - -There is a special SMTP handler, for CRITICAL log messages only. This handler uses the configured pysystemtrade email settings to send those messages as emails From 35a961bd81634ed595a7437498cf5634514dce5c Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Tue, 16 Jan 2024 12:08:31 +0000 Subject: [PATCH 198/235] temp override needs to handle missing data --- requirements.txt | 1 + setup.py | 1 + sysdata/mongodb/mongo_temporary_override.py | 8 +++++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3572227e93..bc06d8a2ff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,4 @@ Werkzeug>=2.0.1 statsmodels==0.14.0 PyPDF2>=2.5.0 pyarrow>=14.0.1 +scikit-learn>1.3.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 1a1836b9f8..0c49a280fe 100755 --- a/setup.py +++ b/setup.py @@ -98,6 +98,7 @@ def dir_this_file(): "statsmodels==0.14.0", "PyPDF2>=2.5.0", "pyarrow>=14.0.1", + "scikit-learn>1.3.0" ], tests_require=["nose", "flake8"], extras_require=dict(), diff --git a/sysdata/mongodb/mongo_temporary_override.py b/sysdata/mongodb/mongo_temporary_override.py index 9b4b26916c..f4ca22716d 100644 --- a/sysdata/mongodb/mongo_temporary_override.py +++ b/sysdata/mongodb/mongo_temporary_override.py @@ -1,3 +1,4 @@ +from syscore.exceptions import missingData from syscore.constants import arg_not_supplied from sysdata.production.temporary_override import temporaryOverrideData @@ -5,6 +6,8 @@ from sysobjects.production.override import Override from sysdata.mongodb.mongo_generic import mongoDataWithSingleKey from syslogging.logger import get_logger +from sysobjects.production.override import DEFAULT_OVERRIDE + TEMPORARY_OVERRIDE_COLLECTION = "temporary_override_collection" KEY = "instrument_code" @@ -27,7 +30,10 @@ def mongo_data(self): return self._mongo_data def get_stored_override_for_instrument(self, instrument_code: str) -> Override: - override_as_dict = self.mongo_data.get_result_dict_for_key(instrument_code) + try: + override_as_dict = self.mongo_data.get_result_dict_for_key(instrument_code) + except missingData: + return DEFAULT_OVERRIDE return from_dict_to_override(override_as_dict) From 1b6f4dfc4fdca50f4721f91e08808ec2bee190c8 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 16 Jan 2024 18:21:23 +0000 Subject: [PATCH 199/235] black --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0c49a280fe..0820d53cbc 100755 --- a/setup.py +++ b/setup.py @@ -98,7 +98,7 @@ def dir_this_file(): "statsmodels==0.14.0", "PyPDF2>=2.5.0", "pyarrow>=14.0.1", - "scikit-learn>1.3.0" + "scikit-learn>1.3.0", ], tests_require=["nose", "flake8"], extras_require=dict(), From 76f2224794bb0c2d75399d87c2e116437b274b9b Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:18:36 +0000 Subject: [PATCH 200/235] spelling --- docs/backtesting.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/backtesting.md b/docs/backtesting.md index 33ea650053..0c5de5bd5d 100644 --- a/docs/backtesting.md +++ b/docs/backtesting.md @@ -1718,7 +1718,7 @@ system.cache.get_cache_refs_for_instrument("EDOLLAR") ## if we change the config system.config.forecast_div_multiplier=100.0 -## ... then the result will be different without neeting to create a new system +## ... then the result will be different without needing to create a new system system.combForecast.get_combined_forecast("EDOLLAR") ``` From 610679b6bc749ae0e4aea9ba5bee54047dc77404 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:18:47 +0000 Subject: [PATCH 201/235] grammar --- docs/IB.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/IB.md b/docs/IB.md index 9134056e29..c5eb6f630d 100644 --- a/docs/IB.md +++ b/docs/IB.md @@ -297,7 +297,7 @@ You can use this directly if you are familiar with ib_insync eg `conn.ib.positio ### Make multiple connections -It's possible to have multiple connections to the IB Gateway, each from it's own process, but each connection must have a unique clientid. Used clientid's are stored in a the active database (usually mongoDB) to ensure we don't re-use active clientids. +It's possible to have multiple connections to the IB Gateway, each from its own process, but each connection must have a unique clientid. Used clientids are stored in the active database (usually mongoDB) to ensure we don't re-use active clientids. From 2795965acc4eae6b265078496bc906d458894c3e Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:19:06 +0000 Subject: [PATCH 202/235] fix shadow cost default --- docs/production_strategy_changes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/production_strategy_changes.md b/docs/production_strategy_changes.md index 6007d1fe2d..5a5f1d1ad1 100644 --- a/docs/production_strategy_changes.md +++ b/docs/production_strategy_changes.md @@ -188,7 +188,7 @@ Under ignore_instruments in the .yaml configuration, I suggest you include only ### Set shadow cost -The shadow cost is a key variable which is set in the private_config.yaml file (*not* the backtest configuration file, since it is used 'outside' the backtest in the strategy order generation). The default value is 10, but you may want to initially begin with a very large value (eg 500) and gradually reduce it over the first few days. This will produce a more gradual adjustment from old to new strategy positions, although bear in mind that any strategy position with the wrong sign will immediately be closed regardless of the shadow_cost value unless you set this instrument to don't trade. +The shadow cost is a key variable which is set in the private_config.yaml file (*not* the backtest configuration file, since it is used 'outside' the backtest in the strategy order generation). The default value is 50, but you may want to initially begin with a very large value (eg 500) and gradually reduce it over the first few days. This will produce a more gradual adjustment from old to new strategy positions, although bear in mind that any strategy position with the wrong sign will immediately be closed regardless of the shadow_cost value unless you set this instrument to don't trade. ### Strategy backtest output of optimal positions From 1d4aceb645dbef7c76aeb082be6ad591ba1a87b2 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:19:17 +0000 Subject: [PATCH 203/235] spelling --- syscore/pandas/pdutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syscore/pandas/pdutils.py b/syscore/pandas/pdutils.py index 272af80866..6d1599e4f0 100755 --- a/syscore/pandas/pdutils.py +++ b/syscore/pandas/pdutils.py @@ -294,7 +294,7 @@ def make_df_from_list_of_named_tuple( field_name_for_index: str = arg_not_supplied, ): """ - Turn a list of named tuplies into a dataframe + Turn a list of named tuples into a dataframe The first element in the tuple will become the index >>> T = namedtuple('T', 'name value_a value_b') From 90d78ff12415bdaae68af26248936fa7b23e6eff Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:19:27 +0000 Subject: [PATCH 204/235] spelling --- sysexecution/algos/common_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysexecution/algos/common_functions.py b/sysexecution/algos/common_functions.py index 0ac8f7ff71..0d23d004a2 100644 --- a/sysexecution/algos/common_functions.py +++ b/sysexecution/algos/common_functions.py @@ -37,7 +37,7 @@ def cancel_order( data_broker = dataBroker(data) data_broker.cancel_order_given_control_object(broker_order_with_controls) - # Wait for cancel. It's vitual we do this since if a fill comes in before we finish it will screw + # Wait for cancel. It's vital we do this since if a fill comes in before we finish it will screw # everything up... timer = quickTimer(seconds=CANCEL_WAIT_TIME) not_cancelled = True From 3b5c02e21785da4c545532472f861125613a3c27 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:19:39 +0000 Subject: [PATCH 205/235] spelling --- sysexecution/stack_handler/cancel_and_modify.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysexecution/stack_handler/cancel_and_modify.py b/sysexecution/stack_handler/cancel_and_modify.py index 733c08cb49..942e529c71 100644 --- a/sysexecution/stack_handler/cancel_and_modify.py +++ b/sysexecution/stack_handler/cancel_and_modify.py @@ -105,7 +105,7 @@ def list_of_orders_not_yet_cancelled( if order_is_cancelled: new_list_of_orders.remove(broker_order) self.log.debug( - "Order %s succesfully cancelled" % broker_order, + "Order %s successfully cancelled" % broker_order, **broker_order.log_attributes(), method="temp", ) From ab1b80684a13f2021210874630aa2f4266a6cb7d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:19:50 +0000 Subject: [PATCH 206/235] spelling --- sysexecution/stack_handler/checks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysexecution/stack_handler/checks.py b/sysexecution/stack_handler/checks.py index ae3cafeefe..c76fc76d7a 100644 --- a/sysexecution/stack_handler/checks.py +++ b/sysexecution/stack_handler/checks.py @@ -45,7 +45,7 @@ def log_and_lock_position_break(self, contract: futuresContract): instrument_code = contract.instrument_code data_locks = dataLocks(self.data) if data_locks.is_instrument_locked(instrument_code): - # alread locked + # already locked return None else: self.log.critical("Break for %s: locking instrument" % (str(contract))) From faec617733706dbfe960d1acf961b8132978fa0c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:20:42 +0000 Subject: [PATCH 207/235] inherite -> inherit --- sysobjects/production/optimal_positions.py | 2 +- sysquant/estimators/generic_estimator.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysobjects/production/optimal_positions.py b/sysobjects/production/optimal_positions.py index 9dae45f819..35306e06eb 100644 --- a/sysobjects/production/optimal_positions.py +++ b/sysobjects/production/optimal_positions.py @@ -236,7 +236,7 @@ def _check_append_positions_okay( ) -## IMPORTANT NOTE: if you create a new kind of optimal position which does not inherite from +## IMPORTANT NOTE: if you create a new kind of optimal position which does not inherit from ## baseOptimalPosition directly, need to manually add it here MASTER_LIST_OF_OPTIMAL_POSITION_CLASSES = baseOptimalPosition.__subclasses__() diff --git a/sysquant/estimators/generic_estimator.py b/sysquant/estimators/generic_estimator.py index 489f78a5d7..4b30889d56 100644 --- a/sysquant/estimators/generic_estimator.py +++ b/sysquant/estimators/generic_estimator.py @@ -88,7 +88,7 @@ def get_estimate_for_fitperiod(self, fit_period: fitDates) -> Estimate: return estimate def get_estimate_for_fitperiod_with_data(self, fit_period: fitDates) -> Estimate: - raise NotImplementedError("Have to inherite from base class") + raise NotImplementedError("Have to inherit from base class") class genericEstimator(object): From 7d126a265b3c4717e314eee076fc20ab9e6d0b61 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:21:09 +0000 Subject: [PATCH 208/235] augemented -> augmented --- sysexecution/orders/broker_orders.py | 2 +- sysproduction/data/orders.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sysexecution/orders/broker_orders.py b/sysexecution/orders/broker_orders.py index efe3b6ff24..a50e92d76e 100644 --- a/sysexecution/orders/broker_orders.py +++ b/sysexecution/orders/broker_orders.py @@ -416,7 +416,7 @@ def create_new_broker_order_from_contract_order( ## Not very pretty but only used for diagnostic TCA class brokerOrderWithParentInformation(brokerOrder): @classmethod - def create_augemented_order( + def create_augmented_order( self, order: brokerOrder, instrument_order: instrumentOrder, diff --git a/sysproduction/data/orders.py b/sysproduction/data/orders.py index d7eb4cc95e..3725e67292 100644 --- a/sysproduction/data/orders.py +++ b/sysproduction/data/orders.py @@ -189,7 +189,7 @@ def get_historic_broker_order_from_order_id_with_execution_data( self.get_parent_instrument_order_for_historic_broker_order_id(order_id) ) - augmented_order = brokerOrderWithParentInformation.create_augemented_order( + augmented_order = brokerOrderWithParentInformation.create_augmented_order( order, contract_order=contract_order, instrument_order=instrument_order ) From 2776cb6ab36952cdc4e3ae73eb0a470553fa2777 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:25:00 +0000 Subject: [PATCH 209/235] overriden -> overridden --- docs/instruments.md | 4 ++-- docs/production.md | 18 +++++++++--------- .../IB/ib_futures_contract_price_data.py | 2 +- syscontrol/run_process.py | 2 +- .../arctic_futures_per_contract_prices.py | 2 +- sysdata/base_data.py | 4 ++-- sysdata/config/configdata.py | 2 +- sysdata/csv/csv_futures_contract_prices.py | 2 +- sysdata/mongodb/mongo_historic_orders.py | 4 ++-- .../parquet_futures_per_contract_prices.py | 2 +- sysdata/sim/sim_data.py | 4 ++-- sysexecution/algos/allocate_algo_to_order.py | 4 ++-- .../order_stacks/broker_order_stack.py | 2 +- .../order_stacks/contract_order_stack.py | 2 +- sysexecution/order_stacks/order_stack.py | 10 +++++----- sysobjects/production/tradeable_object.py | 4 ++-- .../reporting/adhoc/dynamic_optimisation.py | 2 +- .../vol_attenuation_forecast_scale_cap.py | 2 +- systems/trading_rules.py | 2 +- 19 files changed, 37 insertions(+), 37 deletions(-) diff --git a/docs/instruments.md b/docs/instruments.md index 882b1a4701..7ebdc4bdbd 100644 --- a/docs/instruments.md +++ b/docs/instruments.md @@ -348,9 +348,9 @@ Operating in the production environment is a bit more complex, due to the intera ## A note about configuration -When you're running in simulation things are relatively simple; configuration items are defined in defaults_yaml, but can be overriden by your private_config.yaml, and then also by your own backtest.yaml file. +When you're running in simulation things are relatively simple; configuration items are defined in defaults_yaml, but can be overridden by your private_config.yaml, and then also by your own backtest.yaml file. -Importantly, once we're out of the 'backtesting'' part of a production system, we can't see the backtest configuration (which after all is system specific, whereas generally in the production environment we're working with global parameters). So the priority order is `defaults.yaml`, overriden by `private_config.yaml`. The downstream code that produces strategy orders once the production backtest has generated optimal positions, and then trades those orders, will operate only on the configuration in `private_config.yaml` and `defaults.yaml`. +Importantly, once we're out of the 'backtesting'' part of a production system, we can't see the backtest configuration (which after all is system specific, whereas generally in the production environment we're working with global parameters). So the priority order is `defaults.yaml`, overridden by `private_config.yaml`. The downstream code that produces strategy orders once the production backtest has generated optimal positions, and then trades those orders, will operate only on the configuration in `private_config.yaml` and `defaults.yaml`. ## Reduce only and other constraints in static systems diff --git a/docs/production.md b/docs/production.md index 12c7e28197..019bdf6e1b 100644 --- a/docs/production.md +++ b/docs/production.md @@ -1719,7 +1719,7 @@ Linux script: Called by: `run_systems` -The code to run each strategies backtest is defined in the configuration parameter in the control_config.yaml file (or overriden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: +The code to run each strategies backtest is defined in the configuration parameter in the control_config.yaml file (or overridden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: ``` process_configuration_methods: @@ -1763,7 +1763,7 @@ Linux script: Called by: `run_strategy_order_generator` -The code to run each strategy's backtest is defined in the configuration parameter in the control_config.yaml file (or overriden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: +The code to run each strategy's backtest is defined in the configuration parameter in the control_config.yaml file (or overridden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: ``` @@ -2596,7 +2596,7 @@ Useful things to note about the crontab: #### Process configuration -Process configuration is governed by the following config parameters (in [/syscontrol/control_config.yaml](/syscontrol/control_config.yaml), or these will be overriden by /private/private_control_config.yaml): +Process configuration is governed by the following config parameters (in [/syscontrol/control_config.yaml](/syscontrol/control_config.yaml), or these will be overridden by /private/private_control_config.yaml): - `process_configuration_start_time`: when the process starts (default 00:01) - `process_configuration_stop_time`: when the process ends, regardless of any method configuration (default 23:50) @@ -2825,7 +2825,7 @@ The following are configuration options that are not in defaults.yaml and *may* - `email_server`: this is the outgoing server -The following are configuration options that are in defaults.yaml and can be overriden in private_config.yaml: +The following are configuration options that are in defaults.yaml and can be overridden in private_config.yaml: [Backup paths](#data-backup) - `backtest_store_directory` parent directory, backtests are stored under strategy_name subdirectory @@ -2864,7 +2864,7 @@ Outside of the backtest code, in production configuration options are pulled in ### Control config files -As discussed above, these are used purely for control and monitoring purposes in [/syscontrol/control_config.yaml](/syscontrol/control_config.yaml), overriden by /private/private_control_config.yaml). +As discussed above, these are used purely for control and monitoring purposes in [/syscontrol/control_config.yaml](/syscontrol/control_config.yaml), overridden by /private/private_control_config.yaml). ### Broker and data source specific configuration files @@ -2942,7 +2942,7 @@ You can also change other values in the interactive tool, but be careful and mak ## Strategies -Each strategy is defined in the config parameter `strategy_list`, found either in the defaults.yaml file or overriden in private yaml configuration. The following shows the parameters for an example strategy, named (appropriately enough) `example`. +Each strategy is defined in the config parameter `strategy_list`, found either in the defaults.yaml file or overridden in private yaml configuration. The following shows the parameters for an example strategy, named (appropriately enough) `example`. ``` strategy_list: @@ -2956,7 +2956,7 @@ strategy_list: ### Strategy capital -Strategy capital is allocated from [total capital](#capital). This is done by the scripted function, [update strategy capital](#allocate-capital-to-strategies). It is controlled by the configuration element below (in the defaults.yaml file, or overriden in private_config.yaml). +Strategy capital is allocated from [total capital](#capital). This is done by the scripted function, [update strategy capital](#allocate-capital-to-strategies). It is controlled by the configuration element below (in the defaults.yaml file, or overridden in private_config.yaml). ``` strategy_capital_allocation: @@ -2970,7 +2970,7 @@ The allocation calls the function specified, with any other parameters passed as #### Risk target -The actual risk a strategy will take depends on both it's capital and it's risk target. The risk target is set in the configuration option, `percentage_vol_target`, in the backtest configuration .yaml file for the relevant strategy (if not supplied, the defaults.yaml value is used; this is *not* overriden by private_config.yaml). Risk targets can be different across strategies. +The actual risk a strategy will take depends on both it's capital and it's risk target. The risk target is set in the configuration option, `percentage_vol_target`, in the backtest configuration .yaml file for the relevant strategy (if not supplied, the defaults.yaml value is used; this is *not* overridden by private_config.yaml). Risk targets can be different across strategies. #### Changing risk targets and/or capital @@ -3693,7 +3693,7 @@ V2X [20201118, 20201216] 2020-10-15 09:43:30 (1, -1) The strategy report is bespoke to a strategy; it will load the last backtest file generated and report diagnostics from it. On a daily basis it runs for all strategies. On an ad hoc basis, it can be run for all or a single strategy. -The strategy reporting is determined by the parameter `strategy_list/strategy_name/reporting_code/function` in default.yaml or overriden in the private config .yaml file. The 'classic' reporting function is `sysproduction.strategy_code.report_system_classic.report_system_classic` +The strategy reporting is determined by the parameter `strategy_list/strategy_name/reporting_code/function` in default.yaml or overridden in the private config .yaml file. The 'classic' reporting function is `sysproduction.strategy_code.report_system_classic.report_system_classic` Here is an example, with annotations added in quotes (""): diff --git a/sysbrokers/IB/ib_futures_contract_price_data.py b/sysbrokers/IB/ib_futures_contract_price_data.py index 82cc1c376c..7289a6c410 100644 --- a/sysbrokers/IB/ib_futures_contract_price_data.py +++ b/sysbrokers/IB/ib_futures_contract_price_data.py @@ -94,7 +94,7 @@ def has_merged_price_data_for_contract( """ Does IB have data for a given contract? - Overriden because we will have a problem matching expiry dates to nominal yyyymm dates + Overridden because we will have a problem matching expiry dates to nominal yyyymm dates :param contract_object: :return: bool """ diff --git a/syscontrol/run_process.py b/syscontrol/run_process.py index 0f6a46cfb0..9c1270638f 100644 --- a/syscontrol/run_process.py +++ b/syscontrol/run_process.py @@ -2,7 +2,7 @@ General class for 'running' processes We kick them all off in the crontab at a specific time (midnight is easiest), but their subsequent behaviour will - depend on various rules, as defined in ... attribute of defaults.yaml or overriden in private_config + depend on various rules, as defined in ... attribute of defaults.yaml or overridden in private_config - is my process marked as NO OPEN in process control (check database) - is it too early for me to run? (defined in .yaml) diff --git a/sysdata/arctic/arctic_futures_per_contract_prices.py b/sysdata/arctic/arctic_futures_per_contract_prices.py index ad64fea8e0..bcda327f8c 100644 --- a/sysdata/arctic/arctic_futures_per_contract_prices.py +++ b/sysdata/arctic/arctic_futures_per_contract_prices.py @@ -72,7 +72,7 @@ def _write_merged_prices_for_contract_object_no_checking( ): """ Write prices - CHECK prices are overriden on second write + CHECK prices are overridden on second write :param futures_contract_object: futuresContract :param futures_price_data: futuresContractPriceData diff --git a/sysdata/base_data.py b/sysdata/base_data.py index eb29b343ea..26884f837e 100644 --- a/sysdata/base_data.py +++ b/sysdata/base_data.py @@ -51,7 +51,7 @@ def __getitem__(self, keyname): """ raise Exception( - "__getitem__ not defined for baseData class: use a class where it has been overriden" + "__getitem__ not defined for baseData class: use a class where it has been overridden" ) def keys(self): @@ -66,7 +66,7 @@ def keys(self): """ raise Exception( - "keys() not defined for baseData class: use a class where it has been overriden" + "keys() not defined for baseData class: use a class where it has been overridden" ) diff --git a/sysdata/config/configdata.py b/sysdata/config/configdata.py index 48cefb0a59..bdf202388a 100644 --- a/sysdata/config/configdata.py +++ b/sysdata/config/configdata.py @@ -74,7 +74,7 @@ def __init__( """ - # this will normally be overriden by the base system + # this will normally be overridden by the base system self.log = get_logger( "config", {TYPE_LOG_LABEL: "config", STAGE_LOG_LABEL: "config"} ) diff --git a/sysdata/csv/csv_futures_contract_prices.py b/sysdata/csv/csv_futures_contract_prices.py index d243d088dd..e1b9f8fcba 100644 --- a/sysdata/csv/csv_futures_contract_prices.py +++ b/sysdata/csv/csv_futures_contract_prices.py @@ -114,7 +114,7 @@ def _write_merged_prices_for_contract_object_no_checking( ): """ Write prices - CHECK prices are overriden on second write + CHECK prices are overridden on second write :param futures_contract_object: futuresContract :param futures_price_data: futuresContractPriceData diff --git a/sysdata/mongodb/mongo_historic_orders.py b/sysdata/mongodb/mongo_historic_orders.py index 8c823ac033..406aa2cd76 100644 --- a/sysdata/mongodb/mongo_historic_orders.py +++ b/sysdata/mongodb/mongo_historic_orders.py @@ -61,7 +61,7 @@ def __repr__(self): return "%s (%s)" % (self._name, str(self.mongo_data)) def add_order_to_data(self, order: Order, ignore_duplication: bool = False): - # Duplicates will be overriden, so be careful + # Duplicates will be overridden, so be careful order_id = order.order_id no_existing_order = self.get_order_with_orderid(order_id) is missing_order if no_existing_order: @@ -76,7 +76,7 @@ def add_order_to_data(self, order: Order, ignore_duplication: bool = False): ) def _add_order_to_data_no_checking(self, order: Order): - # Duplicates will be overriden, so be careful + # Duplicates will be overridden, so be careful mongo_record = order.as_dict() self.mongo_data.add_data(order.order_id, mongo_record, allow_overwrite=True) diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py index 580574c226..852ea2da16 100644 --- a/sysdata/parquet/parquet_futures_per_contract_prices.py +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -67,7 +67,7 @@ def _write_merged_prices_for_contract_object_no_checking( ): """ Write prices - CHECK prices are overriden on second write + CHECK prices are overridden on second write :param futures_contract_object: futuresContract :param futures_price_data: futuresContractPriceData diff --git a/sysdata/sim/sim_data.py b/sysdata/sim/sim_data.py index 7763ccc016..fa06c3a91e 100644 --- a/sysdata/sim/sim_data.py +++ b/sysdata/sim/sim_data.py @@ -174,7 +174,7 @@ def get_raw_price(self, instrument_code: str) -> pd.Series: """ Default method to get instrument price at 'natural' frequency - Will usually be overriden when inherited with specific data source + Will usually be overridden when inherited with specific data source :param instrument_code: instrument to get prices for :type instrument_code: str @@ -194,7 +194,7 @@ def get_raw_price_from_start_date( """ Default method to get instrument price at 'natural' frequency - Will usually be overriden when inherited with specific data source + Will usually be overridden when inherited with specific data source :param instrument_code: instrument to get prices for :type instrument_code: str diff --git a/sysexecution/algos/allocate_algo_to_order.py b/sysexecution/algos/allocate_algo_to_order.py index 83981b7ec7..22b37e2b62 100644 --- a/sysexecution/algos/allocate_algo_to_order.py +++ b/sysexecution/algos/allocate_algo_to_order.py @@ -92,7 +92,7 @@ def check_and_if_required_allocate_algo_to_single_contract_order( # not used yet, but maybe in the future is_roll_order = instrument_order.roll_order - if algo_allocation_is_overriden_for_instrument( + if algo_allocation_is_overridden_for_instrument( contract_order=contract_order, config=config ): contract_order = allocate_algo_for_specific_instrument_with_override( @@ -143,7 +143,7 @@ def already_has_algo_allocated(contract_order: contractOrder) -> bool: return contract_order.algo_to_use != "" -def algo_allocation_is_overriden_for_instrument( +def algo_allocation_is_overridden_for_instrument( contract_order: contractOrder, config: AlgoConfig ) -> bool: instrument_code = contract_order.instrument_code diff --git a/sysexecution/order_stacks/broker_order_stack.py b/sysexecution/order_stacks/broker_order_stack.py index 52a4de8925..c3d6eb2c0d 100644 --- a/sysexecution/order_stacks/broker_order_stack.py +++ b/sysexecution/order_stacks/broker_order_stack.py @@ -31,7 +31,7 @@ def find_order_with_broker_tempid(self, broker_tempid: str): return missing_order def get_order_with_id_from_stack(self, order_id: int) -> brokerOrder: - # probably will be overriden in data implementation + # probably will be overridden in data implementation # only here so the appropriate type is shown as being returned order = self.stack.get(order_id, missing_order) diff --git a/sysexecution/order_stacks/contract_order_stack.py b/sysexecution/order_stacks/contract_order_stack.py index 276e44c1ed..d5b7c91e96 100644 --- a/sysexecution/order_stacks/contract_order_stack.py +++ b/sysexecution/order_stacks/contract_order_stack.py @@ -74,7 +74,7 @@ def release_order_from_algo_control(self, order_id: int): raise Exception(error_msg) def get_order_with_id_from_stack(self, order_id: int) -> contractOrder: - # probably will be overriden in data implementation + # probably will be overridden in data implementation # only here so the appropriate type is shown as being returned order = self.stack.get(order_id, missing_order) diff --git a/sysexecution/order_stacks/order_stack.py b/sysexecution/order_stacks/order_stack.py index 4e39ffc342..e2c95987fe 100644 --- a/sysexecution/order_stacks/order_stack.py +++ b/sysexecution/order_stacks/order_stack.py @@ -526,29 +526,29 @@ def _delete_entire_stack_without_checking_only_use_when_debugging(self): # LOW LEVEL OPERATIONS to include in specific implementation def _get_list_of_all_order_ids(self) -> list: - # probably will be overriden in data implementation + # probably will be overridden in data implementation raise NotImplementedError # deleting def _remove_order_with_id_from_stack_no_checking(self, order_id: int): - # probably will be overriden in data implementation + # probably will be overridden in data implementation raise NotImplementedError def _change_order_on_stack_no_checking(self, order_id: int, order: Order): # - # probably will be overriden in data implementation + # probably will be overridden in data implementation raise NotImplementedError def get_order_with_id_from_stack(self, order_id: int) -> Order: - # probably will be overriden in data implementation + # probably will be overridden in data implementation # return missing_order if not found raise NotImplementedError def _put_order_on_stack_no_checking(self, order: Order): - # probably will be overriden in data implementation + # probably will be overridden in data implementation raise NotImplementedError diff --git a/sysobjects/production/tradeable_object.py b/sysobjects/production/tradeable_object.py index 62f30f6b33..00c02a35bf 100644 --- a/sysobjects/production/tradeable_object.py +++ b/sysobjects/production/tradeable_object.py @@ -11,7 +11,7 @@ class tradeableObject(object): """ def __init__(self, object_name): - # probably overriden with nicer entry + # probably overridden with nicer entry self._key = object_name def __repr__(self): @@ -26,7 +26,7 @@ def __eq__(self, other): @property def key(self): - # probably overriden + # probably overridden return self._key diff --git a/sysproduction/reporting/adhoc/dynamic_optimisation.py b/sysproduction/reporting/adhoc/dynamic_optimisation.py index bf76dbfa1a..95f6f59e70 100644 --- a/sysproduction/reporting/adhoc/dynamic_optimisation.py +++ b/sysproduction/reporting/adhoc/dynamic_optimisation.py @@ -35,7 +35,7 @@ def get_notional_risk_target(): - ## might be overriden by strategy but we don't have the backtest .yaml here + ## might be overridden by strategy but we don't have the backtest .yaml here return 25.0 diff --git a/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py b/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py index f0343c8cf3..3f0c22ccb8 100644 --- a/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py +++ b/systems/provided/attenuate_vol/vol_attenuation_forecast_scale_cap.py @@ -36,7 +36,7 @@ def get_raw_forecast_before_attenuation(self, instrument_code, rule_variation_na @diagnostic() def get_raw_forecast(self, instrument_code, rule_variation_name): - ## overriden method this will be called downstream so don't change name + ## overridden method this will be called downstream so don't change name raw_forecast_before_atten = self.get_raw_forecast_before_attenuation( instrument_code, rule_variation_name ) diff --git a/systems/trading_rules.py b/systems/trading_rules.py index f920ab6be2..1f36280b94 100644 --- a/systems/trading_rules.py +++ b/systems/trading_rules.py @@ -120,7 +120,7 @@ def _get_data_from_system(self, system: "System", instrument_code: str): """ # Following is a list of additional kwargs to pass to the data functions. Can be empty dicts - # Use copy as can be overriden + # Use copy as can be overridden list_of_data_str_references = self.data list_of_args_to_pass_to_data_calls = copy(self.data_args) From f4bac42f0548e9905adcbbe0ccd5a2a96a391552 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:25:35 +0000 Subject: [PATCH 210/235] spelling --- systems/tests/test_mp_optimise_over_time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/tests/test_mp_optimise_over_time.py b/systems/tests/test_mp_optimise_over_time.py index 21d099e974..74a782685a 100644 --- a/systems/tests/test_mp_optimise_over_time.py +++ b/systems/tests/test_mp_optimise_over_time.py @@ -28,7 +28,7 @@ def test_pickling(self): # pickle net_returns_pkl_s = pkl.dumps(self.net_returns) - # unplickle + # unpickle net_returns = pkl.loads(net_returns_pkl_s) # attributes after unpicking From e6476474ea049e94e50aa963550adc21fc3196d6 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:26:09 +0000 Subject: [PATCH 211/235] grammar --- docs/production.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/production.md b/docs/production.md index 019bdf6e1b..2b0af24bb2 100644 --- a/docs/production.md +++ b/docs/production.md @@ -1719,7 +1719,7 @@ Linux script: Called by: `run_systems` -The code to run each strategies backtest is defined in the configuration parameter in the control_config.yaml file (or overridden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: +The code to run each strategy's backtest is defined in the configuration parameter in the control_config.yaml file (or overridden in the private_control_config.yaml file): `process_configuration_methods/run_systems/strategy_name/`. For example: ``` process_configuration_methods: From c27f49fc54fe5a53cb8c61d7ee9b178f02de8a35 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:28:09 +0000 Subject: [PATCH 212/235] raise Exception, not str --- systems/forecast_combine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/forecast_combine.py b/systems/forecast_combine.py index 6c49094ae6..407da9f67e 100755 --- a/systems/forecast_combine.py +++ b/systems/forecast_combine.py @@ -1492,7 +1492,7 @@ def _get_fixed_fdm_scalar_value_from_config( ) else: log.critical(error_msg, instrument_code=instrument_code) - raise (error_msg) + raise Exception(error_msg) return fixed_div_mult From 64d5f0b26cef24f7d75d08862175546da1b54f0c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:30:59 +0000 Subject: [PATCH 213/235] Can only see emails now, not logs or errors --- syslogdiag/email_via_db_interface.py | 2 +- sysproduction/interactive_diagnostics.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/syslogdiag/email_via_db_interface.py b/syslogdiag/email_via_db_interface.py index ebeea5625e..0c6155e557 100644 --- a/syslogdiag/email_via_db_interface.py +++ b/syslogdiag/email_via_db_interface.py @@ -110,7 +110,7 @@ def check_if_sent_in_last_day(last_time_email_sent: datetime.datetime): def send_warning_email(data, subject): - body = "To reduce email load, won't send any more emails with this subject today. Use 'interactive_diagnostics', 'logs, emails, and errors' to see stored messages" + body = "To reduce email load, won't send any more emails with this subject today. Use 'interactive_diagnostics', 'Emails' to see stored messages" send_email_and_record_date_or_store_on_fail(data, body, subject) diff --git a/sysproduction/interactive_diagnostics.py b/sysproduction/interactive_diagnostics.py index 0aec5b4502..d1e688352c 100644 --- a/sysproduction/interactive_diagnostics.py +++ b/sysproduction/interactive_diagnostics.py @@ -81,7 +81,7 @@ def interactive_diagnostics(): top_level_menu_of_options = { 0: "backtest objects", 1: "View instrument configuration", - 2: "logs, emails, and errors", + 2: "Emails", 3: "View prices", 4: "View capital", 5: "View positions & orders", From e2a8119eb9a1f2c00b116f8d5c231774288c5426 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:48:00 +0000 Subject: [PATCH 214/235] fix menu ID for broker orders --- sysproduction/interactive_order_stack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/interactive_order_stack.py b/sysproduction/interactive_order_stack.py index aeebab448a..e9d0cf0440 100644 --- a/sysproduction/interactive_order_stack.py +++ b/sysproduction/interactive_order_stack.py @@ -429,7 +429,7 @@ def create_manual_trade(data): ) print( - "For instant execution, you may want to do menu [1] create orders, menu [13] create broker orders" + "For instant execution, you may want to do menu [1] create orders, menu [12] create broker orders" ) return None From d0f3f8c81fb5a0a90753c197b5ebc1b8be690295 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:48:39 +0000 Subject: [PATCH 215/235] ago -> algo --- sysexecution/order_stacks/contract_order_stack.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/sysexecution/order_stacks/contract_order_stack.py b/sysexecution/order_stacks/contract_order_stack.py index d5b7c91e96..db18eb5576 100644 --- a/sysexecution/order_stacks/contract_order_stack.py +++ b/sysexecution/order_stacks/contract_order_stack.py @@ -24,7 +24,9 @@ def add_controlling_algo_ref(self, order_id: int, control_algo_ref: str): existing_order = self.get_order_with_id_from_stack(order_id) if existing_order is missing_order: - error_msg = "Can't add controlling ago as order %d doesn't exist" % order_id + error_msg = ( + "Can't add controlling algo as order %d doesn't exist" % order_id + ) self.log.warning(error_msg) raise missingOrder(error_msg) @@ -48,7 +50,9 @@ def add_controlling_algo_ref(self, order_id: int, control_algo_ref: str): def release_order_from_algo_control(self, order_id: int): existing_order = self.get_order_with_id_from_stack(order_id) if existing_order is missing_order: - error_msg = "Can't add controlling ago as order %d doesn't exist" % order_id + error_msg = ( + "Can't add controlling algo as order %d doesn't exist" % order_id + ) self.log.warning(error_msg) raise missingOrder(error_msg) From bb1eb5f29b6cc7cc7a2740f8ba9bbd5190e1371c Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 09:49:50 +0000 Subject: [PATCH 216/235] missing ) --- sysproduction/interactive_order_stack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysproduction/interactive_order_stack.py b/sysproduction/interactive_order_stack.py index e9d0cf0440..281228830e 100644 --- a/sysproduction/interactive_order_stack.py +++ b/sysproduction/interactive_order_stack.py @@ -212,7 +212,7 @@ def create_balance_trade(data): def get_broker_order_details_for_balance_trade(data: dataBlob) -> brokerOrder: ans = true_if_answer_is_yes( - "Auto close an existing position (if not, manually enter details?" + "Auto close an existing position (if not, manually enter details)?" ) if ans: ( From 1d247c417d8ab24b33be92bee7a3a4fd05e3ec86 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 10:15:00 +0000 Subject: [PATCH 217/235] type not constructor --- sysdata/production/capital.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sysdata/production/capital.py b/sysdata/production/capital.py index facb8f996d..7cb974b063 100644 --- a/sysdata/production/capital.py +++ b/sysdata/production/capital.py @@ -309,7 +309,7 @@ def get_total_capital(self) -> pd.Series: def get_current_accumulated_pandl(self) -> float: return self.capital_data.get_current_pandl_account() - def get_profit_and_loss_account(self) -> pd.Series(): + def get_profit_and_loss_account(self) -> pd.Series: return self.capital_data.get_profit_and_loss_account_pd_series() def get_maximum_account(self) -> pd.Series: From ab64288c9bc5af9217b20e84b0027d8f52bed958 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 10:15:38 +0000 Subject: [PATCH 218/235] black --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0c49a280fe..0820d53cbc 100755 --- a/setup.py +++ b/setup.py @@ -98,7 +98,7 @@ def dir_this_file(): "statsmodels==0.14.0", "PyPDF2>=2.5.0", "pyarrow>=14.0.1", - "scikit-learn>1.3.0" + "scikit-learn>1.3.0", ], tests_require=["nose", "flake8"], extras_require=dict(), From 08506bcaf95a78a063b0e94d7d7be124e0f83460 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 10:17:27 +0000 Subject: [PATCH 219/235] print branch name in job --- .github/workflows/slow-test-develop.yml | 2 +- .github/workflows/slow-test-master.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/slow-test-develop.yml b/.github/workflows/slow-test-develop.yml index 9d1ea46dd3..55b45fa9d9 100644 --- a/.github/workflows/slow-test-develop.yml +++ b/.github/workflows/slow-test-develop.yml @@ -1,4 +1,4 @@ -name: Slow test +name: Slow test (develop) on: schedule: diff --git a/.github/workflows/slow-test-master.yml b/.github/workflows/slow-test-master.yml index db0ee9e0db..8b2b2d6dbb 100644 --- a/.github/workflows/slow-test-master.yml +++ b/.github/workflows/slow-test-master.yml @@ -1,4 +1,4 @@ -name: Slow test +name: Slow test (master) on: schedule: From bec93c5f5f96d8d230b2670263337be3106039f4 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 17 Jan 2024 11:17:43 +0000 Subject: [PATCH 220/235] remove unused method _resolve_names_and_add() --- sysdata/data_blob.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/sysdata/data_blob.py b/sysdata/data_blob.py index d54f348fb8..22e9068768 100644 --- a/sysdata/data_blob.py +++ b/sysdata/data_blob.py @@ -237,10 +237,6 @@ def _get_specific_logger(self, class_object): return log - def _resolve_names_and_add(self, resolved_instance, new_name: str): - attr_name = self._get_new_name(class_name) - self._add_new_class_with_new_name(resolved_instance, attr_name) - def _get_new_name(self, class_name: str, use_prefix: str = arg_not_supplied) -> str: split_up_name = camel_case_split(class_name) attr_name = identifying_name( From e08eaa8fddfef6bdbf5542dbd43ce05889036336 Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Thu, 18 Jan 2024 08:45:18 +0000 Subject: [PATCH 221/235] attempt to fix log error --- sysdata/parquet/parquet_futures_per_contract_prices.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py index 580574c226..c508e715ba 100644 --- a/sysdata/parquet/parquet_futures_per_contract_prices.py +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -86,7 +86,6 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - log = futures_contract_object.log(self.log) ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) @@ -98,14 +97,15 @@ def _write_prices_at_frequency_for_contract_object_no_checking( data_to_write=futures_price_data_as_pd, ) - log.debug( + self.log.debug( "Wrote %s lines of prices for %s at %s to %s" % ( len(futures_price_data), str(futures_contract_object.key), str(frequency), str(self), - ) + ), + **futures_contract_object.log_attributes() ) def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: From 130a22c7352896f0edf5ff8adf6224ee658c346d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 18 Jan 2024 08:53:06 +0000 Subject: [PATCH 222/235] replace futures_contract_object.log() with temp log_attributes() --- .../parquet_futures_per_contract_prices.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/sysdata/parquet/parquet_futures_per_contract_prices.py b/sysdata/parquet/parquet_futures_per_contract_prices.py index 852ea2da16..6fa5566e76 100644 --- a/sysdata/parquet/parquet_futures_per_contract_prices.py +++ b/sysdata/parquet/parquet_futures_per_contract_prices.py @@ -86,7 +86,6 @@ def _write_prices_at_frequency_for_contract_object_no_checking( futures_price_data: futuresContractPrices, frequency: Frequency, ): - log = futures_contract_object.log(self.log) ident = from_contract_and_freq_to_key( futures_contract_object, frequency=frequency ) @@ -98,14 +97,16 @@ def _write_prices_at_frequency_for_contract_object_no_checking( data_to_write=futures_price_data_as_pd, ) - log.debug( + self.log.debug( "Wrote %s lines of prices for %s at %s to %s" % ( len(futures_price_data), str(futures_contract_object.key), str(frequency), str(self), - ) + ), + **futures_contract_object.log_attributes(), + method="temp", ) def get_contracts_with_merged_price_data(self) -> listOfFuturesContracts: @@ -187,17 +188,17 @@ def _delete_merged_prices_for_contract_object_with_no_checks_be_careful( def _delete_prices_at_frequency_for_contract_object_with_no_checks_be_careful( self, futures_contract_object: futuresContract, frequency: Frequency ): - log = futures_contract_object.log(self.log) - ident = from_contract_and_freq_to_key( contract=futures_contract_object, frequency=frequency ) self.parquet.delete_data_given_data_type_and_identifier( data_type=CONTRACT_COLLECTION, identifier=ident ) - log.debug( + self.log.debug( "Deleted all prices for %s from %s" - % (futures_contract_object.key, str(self)) + % (futures_contract_object.key, str(self)), + **futures_contract_object.log_attributes(), + method="temp", ) From 74bd122aa16cef7acd78e079bffc8ecb94f8391b Mon Sep 17 00:00:00 2001 From: Todd Gibson <3578666+tgibson11@users.noreply.github.com> Date: Mon, 22 Jan 2024 10:20:57 -0700 Subject: [PATCH 223/235] Correct IBEX spread cost entry --- data/futures/csvconfig/spreadcosts.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/futures/csvconfig/spreadcosts.csv b/data/futures/csvconfig/spreadcosts.csv index cdb269329d..95b8372b3f 100644 --- a/data/futures/csvconfig/spreadcosts.csv +++ b/data/futures/csvconfig/spreadcosts.csv @@ -266,7 +266,7 @@ HSCEI-DIV,0.0 HUF,6.5000000000000004e-06 HUFEUR,0.0 IBEX_mini,2.5 -IBXEX,0.0 +IBEX,0.0 IG,0.029 IND-BANK,0.0 IND-FIN,0.0 From f24c8c339167bbfe347f39ada621f86c2e0c70a9 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 23 Jan 2024 09:55:01 +0000 Subject: [PATCH 224/235] initial --- docs/recent_changes.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 docs/recent_changes.md diff --git a/docs/recent_changes.md b/docs/recent_changes.md new file mode 100644 index 0000000000..b44cfeafdf --- /dev/null +++ b/docs/recent_changes.md @@ -0,0 +1,24 @@ +# Recent changes + +There are several major changes to the application that are not fully reflected in the docs yet. See below for the Issues or Discussions where covered: + +### Parquet / Arctic + +### More recent dependency versions + +### Roll states + +### Instrument and forecast weight config as hierarchy + +### Instrument and roll config moved to CSV storage + +### Price collection all day + +### Changes to timing of production processes + +### Price collection all day + +### Separate daily and hourly prices + + + From 8960ac47c31e096350fd77a4d1cd014a3e6c1723 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 23 Jan 2024 14:42:12 +0000 Subject: [PATCH 225/235] new docs doc outlining some recent changes --- docs/recent_changes.md | 41 ++++++++++++++++++++++++++++++++++------- 1 file changed, 34 insertions(+), 7 deletions(-) diff --git a/docs/recent_changes.md b/docs/recent_changes.md index b44cfeafdf..94f812df65 100644 --- a/docs/recent_changes.md +++ b/docs/recent_changes.md @@ -1,24 +1,51 @@ # Recent changes -There are several major changes to the application that are not fully reflected in the docs yet. See below for the Issues or Discussions where covered: +There are several major changes to the application that are not fully reflected in the docs yet. See below for links to discussions or issues where covered: ### Parquet / Arctic +* Nov 2023 +* Default behaviour is now to use Parquet for persistence of timeseries data. Staying with Arctic is still possible with manual changes +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/1290), and [here](https://github.com/robcarver17/pysystemtrade/discussions/1291) ### More recent dependency versions +* Nov 2023 +* More recent versions of Python (3.10), Pandas (2) are supported +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/1293) -### Roll states +### Roll states, auto rolling, roll rules +* Jul 2023 +* Contract rolling behaviour updated +* Read more [here](https://github.com/robcarver17/pysystemtrade/issues/1198), [here](https://github.com/robcarver17/pysystemtrade/issues/931), and [here](https://github.com/robcarver17/pysystemtrade/issues/1193) -### Instrument and forecast weight config as hierarchy +### No market data +* Jul 2023 +* Easier trading without market data subscriptions +* Read more [here](https://github.com/robcarver17/pysystemtrade/issues/1165), [here](https://github.com/robcarver17/pysystemtrade/issues/1016), and the `algo_overrides` section in [defaults.yml](https://github.com/robcarver17/pysystemtrade/blob/master/sysdata/config/defaults.yaml) + +### Instrument and forecast weight config as hierarchy +* Jun 2023 +* Easier way to specify weights +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/1160) and [here](https://github.com/robcarver17/pysystemtrade/issues/1162) ### Instrument and roll config moved to CSV storage +* Mar 2023 +* Persistence of instrument and roll config moved from MongoDB to CSV +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/1054) -### Price collection all day +### Development processes +* Mar 2023 +* Now two branches: `master` is stable, develop work happens on `develop`. Branches for PRs should be made from `develop` +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/1069) ### Changes to timing of production processes - -### Price collection all day +* Jan 2023 +* Timing of some daily production processes adjusted +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/913), [here](https://github.com/robcarver17/pysystemtrade/discussions/956), and [here](https://github.com/robcarver17/pysystemtrade/discussions/961) ### Separate daily and hourly prices - +* Aug 2022 +* Daily and hourly price data are now stored separately +* Read more [here](https://github.com/robcarver17/pysystemtrade/discussions/756) + From 3180d8cd92490d18f927a1cb8c72b96da5df83db Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 23 Jan 2024 14:43:01 +0000 Subject: [PATCH 226/235] installation doc updated for new dependencies --- docs/installation.md | 183 ++++++++++++++++--------------------------- 1 file changed, 68 insertions(+), 115 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 6ab2562c96..b796d1a5b6 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,7 +2,7 @@ ## Introduction -This project has some quirks relating to installation and dependencies. Mostly, they're to do with the reliance on Arctic. That project has very specific requirements with some versions of dependencies which are becoming seriously out of date. It makes most sense to have a specific Python installation for this project, with the dependencies isolated from those used by other projects. This guide shows the quickest and easiest way to manage that +This guide shows the quickest and easiest way to install the project in a virtual environment ## pyenv @@ -15,12 +15,12 @@ Installation instructions for pyenv are here: https://github.com/pyenv/pyenv#installation -## Python 3.8 +## Python 3.10 -pysystemtrade currently requires Python 3.8, so once pyenv is installed, the first step is to get that. Get the latest 3.8.x version, at the time of writing it is 3.8.16 +pysystemtrade currently requires Python 3.10, so once pyenv is installed, the first step is to get that. Get the latest 3.10.x version, at the time of writing it is 3.10.13 ``` -$ pyenv install 3.8.16 +$ pyenv install 3.10 ``` Once complete you should be able to see the new version in the output of `pyenv versions` @@ -33,11 +33,12 @@ $ pyenv versions 3.8.6 3.8.10 3.8.13 -* 3.8.15 + 3.8.15 3.8.16 3.9.6 3.9.13 3.10.4 +* 3.10.13 ``` Your output will be different, it's just an example @@ -59,19 +60,18 @@ otherwise, you'll want the main repo git clone https://github.com/robcarver17/pysystemtrade.git ``` -Now we will want to let pyenv know that we want to use Python 3.8 for this project +Now we will want to let pyenv know that we want to use Python 3.10 for this project ``` cd pysystemtrade -pyenv local 3.8.16 +pyenv local 3.10.13 ``` -this creates a file at the top level of the project `.python-version` that lets the Python execution environment know to use version 3.8.16. We can check this by running python +this creates a file at the top level of the project `.python-version` that lets the Python execution environment know to use version 3.10.13. We can check this by running python ``` $ python -Python 3.8.16 (default, Mar 19 2023, 11:38:42) -[Clang 14.0.0 (clang-1400.0.29.202)] +Python 3.10.13 (main, Nov 27 2023, 11:13:49) [Clang 14.0.0 (clang-1400.0.29.202)] Type "help", "copyright", "credits" or "license" for more information. >>> < ctrl-D to exit > @@ -79,27 +79,27 @@ Type "help", "copyright", "credits" or "license" for more information. ## venv -https://docs.python.org/3.8/library/venv.html +https://docs.python.org/3.10/library/venv.html -Now we want to create a virtual env (venv) for the project. Doing this will keep all the dependencies for pysystemtrade (some of which are pretty old) separate from your other python projects +Now we want to create a virtual env (venv) for the project. Doing this will keep all the dependencies for pysystemtrade separate from your other python projects ``` -$ python -m venv venv/3.8.16 +$ python -m venv venv/3.10.13 ``` This will create a brand new, isolated Python environment *inside the pysystemtrade project* at the directory -`/pysystemtrade/venv/3.8.6`. You can give your environment any name (the *venv/3.8.6* bit). +`/pysystemtrade/venv/3.10.13`. You can give your environment any name (the *venv/3.10.13* bit). Now activate the virtual environment ``` -source venv/3.8.16/bin/activate +source venv/3.10.13/bin/activate ``` Once your virtual env is activated, the prompt will change. It will look something like ``` -(3.8.16) $ +(3.10.13) $ ``` This reminds you that you're in a venv. (You can exit the venv at any time by running `deactivate`) @@ -109,114 +109,67 @@ This reminds you that you're in a venv. (You can exit the venv at any time by ru Now it's time to start setting up the venv. First check to see what is there ``` -(3.8.16) $ pip list +(3.10.13) $ pip list ``` You will probably be prompted to update pip at this time. Do whatever command it suggests. -Now install *wheel* - -``` -(3.8.16) $ pip install wheel -``` - -### Linux, Windows, MacOS (Intel) - -Install *cython* - -``` -(3.8.16) $ pip install cython -``` - And now install the dependencies ``` -(3.8.16) $ pip install -r requirements.txt -``` - -### MacOS (ARM) - -If you're running MacOS on one of the new ARM chips, the process is more complex. You'll need Homebrew and the Apple XCode Commandline Development Tools, configured for ARM. Doing that is beyond the scope of this document, type `homebrew apple xcode command line tools` into your favourite search engine. Once installed and configured, install *cython*: - -``` -(3.8.16) $ OPENBLAS="$(brew --prefix openblas)" MACOSX_DEPLOYMENT_TARGET=12.6 python -m pip install cython --no-use-pep517 -``` - -Then the key dependencies - -``` -(3.8.16) $ OPENBLAS="$(brew --prefix openblas)" MACOSX_DEPLOYMENT_TARGET=12.6 python -m pip install "numpy>=1.19.4,<1.24.0" --no-use-pep517 -(3.8.16) $ OPENBLAS="$(brew --prefix openblas)" MACOSX_DEPLOYMENT_TARGET=12.6 python -m pip install scipy --no-use-pep517 -(3.8.16) $ OPENBLAS="$(brew --prefix openblas)" MACOSX_DEPLOYMENT_TARGET=12.6 python -m pip install pandas==1.0.5 --no-use-pep517 -(3.8.16) $ OPENBLAS="$(brew --prefix openblas)" MACOSX_DEPLOYMENT_TARGET=12.6 python -m pip install statsmodels==0.12.2 --no-use-pep517 -``` - -Then the remaining dependencies - -``` -(3.8.16) $ pip install -r requirements.txt +(3.10.13) $ pip install -r requirements.txt ``` -### Check dependencies, all OSs +### Check dependencies Check what is installed, should look something like ``` -(3.8.16) $ pip list -Package Version ---------------------- ----------- -arctic 1.79.2 -attrs 22.2.0 -backports.zoneinfo 0.2.1 -click 8.1.3 -contourpy 1.0.7 -cycler 0.11.0 -Cython 0.29.33 -decorator 5.1.1 -enum-compat 0.0.3 -eventkit 1.0.0 -exceptiongroup 1.1.1 -Flask 2.2.3 -fonttools 4.39.2 -ib-insync 0.9.70 -importlib-metadata 6.1.0 -importlib-resources 5.12.0 -iniconfig 2.0.0 -itsdangerous 2.1.2 -Jinja2 3.1.2 -kiwisolver 1.4.4 -lz4 4.3.2 -MarkupSafe 2.1.2 -matplotlib 3.7.1 -mockextras 1.0.2 -nest-asyncio 1.5.6 -numpy 1.23.5 -packaging 23.0 -pandas 1.0.5 -patsy 0.5.3 -Pillow 9.4.0 -pip 23.0.1 -pluggy 1.0.0 -psutil 5.6.6 -pymongo 3.9.0 -pyparsing 3.0.9 -PyPDF2 3.0.1 -pytest 7.2.2 -python-dateutil 2.8.2 -pytz 2022.7.1 -pytz-deprecation-shim 0.1.0.post0 -PyYAML 5.4 -scipy 1.10.1 -setuptools 56.0.0 -six 1.16.0 -statsmodels 0.12.2 -tomli 2.0.1 -typing_extensions 4.5.0 -tzdata 2022.7 -tzlocal 4.3 -Werkzeug 2.2.3 -wheel 0.40.0 -zipp 3.15.0 +(3.10.13) % pip list +Package Version +--------------- ------------ +blinker 1.7.0 +click 8.1.7 +contourpy 1.2.0 +cycler 0.12.1 +eventkit 1.0.3 +exceptiongroup 1.2.0 +Flask 3.0.1 +fonttools 4.47.2 +ib-insync 0.9.86 +iniconfig 2.0.0 +itsdangerous 2.1.2 +Jinja2 3.1.3 +joblib 1.3.2 +kiwisolver 1.4.5 +MarkupSafe 2.1.4 +matplotlib 3.8.2 +nest-asyncio 1.6.0 +numpy 1.26.3 +packaging 23.2 +pandas 2.1.3 +patsy 0.5.6 +pillow 10.2.0 +pip 23.3.2 +pluggy 1.3.0 +psutil 5.6.6 +pyarrow 15.0.0 +pymongo 3.11.3 +pyparsing 3.1.1 +PyPDF2 3.0.1 +pytest 7.4.4 +python-dateutil 2.8.2 +pytz 2023.3.post1 +PyYAML 5.3.1 +scikit-learn 1.4.0 +scipy 1.12.0 +setuptools 65.5.0 +six 1.16.0 +statsmodels 0.14.0 +threadpoolctl 3.2.0 +tomli 2.0.1 +tzdata 2023.4 +Werkzeug 3.0.1 ``` ## pysystemtrade @@ -224,18 +177,18 @@ zipp 3.15.0 And finally, install the project itself ``` -(3.8.16) $ python setup.py develop +(3.10.13) $ python setup.py develop ``` Check stuff works ``` -(3.8.16) $ python +(3.10.13) $ python >>> >>> from sysdata.sim.csv_futures_sim_data import csvFuturesSimData +Configuring sim logging >>> data=csvFuturesSimData() -2023-03-19 12:29:18 {'type': 'csvFuturesSimData'} [Warning] No datapaths provided for .csv, will use defaults (may break in production, should be fine in sim) >>> data -csvFuturesSimData object with 208 instruments +csvFuturesSimData object with 249 instruments >>> ``` From 32e3037e7a5a306c7bf7bfcb342ee86e04482cbf Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 23 Jan 2024 14:43:27 +0000 Subject: [PATCH 227/235] link to new recent_changes doc --- docs/backtesting.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/backtesting.md b/docs/backtesting.md index c9b8b7d049..df5cc2ef20 100644 --- a/docs/backtesting.md +++ b/docs/backtesting.md @@ -5,6 +5,7 @@ Related documents: - [Storing futures and spot FX data](/docs/data.md) - [Using pysystemtrade as a production trading environment](/docs/production.md) - [Connecting pysystemtrade to interactive brokers](/docs/IB.md) +- [Recent undocumented changes](/docs/recent_changes.md) This guide is divided into four parts. The first ['How do I?'](#how_do_i) From 0f9a06a72e76261b4840464f3d28a6cd4d51616d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 23 Jan 2024 14:43:59 +0000 Subject: [PATCH 228/235] link to new recent_changes doc, TOC rebuilt --- docs/production.md | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/docs/production.md b/docs/production.md index ba529f8605..666ae4b1eb 100644 --- a/docs/production.md +++ b/docs/production.md @@ -18,6 +18,7 @@ And documents you should read after this one: - [Instruments](/docs/instruments.md) - [Dashboard and monitor](/docs/dashboard_and_monitor.md) - [Production strategy changes](/docs/production_strategy_changes.md) +- [Recent undocumented changes](/docs/recent_changes.md) *IMPORTANT: Make sure you know what you are doing. All financial trading offers the possibility of loss. Leveraged trading, such as futures trading, may result in you losing all your money, and still owing more. Backtested results are no guarantee of future performance. No warranty is offered or implied for this software. I can take no responsibility for any losses caused by live trading using pysystemtrade. Use at your own risk.* @@ -50,9 +51,15 @@ Table of Contents * [Echos: stdout output](#echos-stdout-output) * [Cleaning old echo files](#cleaning-old-echo-files) * [Logging](#logging) + * [socket](#socket) + * [socket server as a service](#socket-server-as-a-service) + * [console](#console) + * [email](#email) * [Adding logging to your code](#adding-logging-to-your-code) - * [Getting log data back](#getting-log-data-back) + * [Examples](#examples) * [Cleaning old logs](#cleaning-old-logs) + * [Echos](#echos) + * [Logs](#logs) * [Reporting](#reporting) * [Positions and order levels](#positions-and-order-levels) * [Instrument level](#instrument-level) @@ -61,12 +68,9 @@ Table of Contents * [The journey of an order](#the-journey-of-an-order) * [Optimal positions](#optimal-positions) * [Optimal position for roll orders](#optimal-position-for-roll-orders) - * [Optimal positions for intra-instrument spread orders](#optimal-positions-for-intra-instrument-spread-orders) - * [Optimal positions for intra-instrument spread orders](#optimal-positions-for-intra-instrument-spread-orders-1) * [Strategy order handling](#strategy-order-handling) * [Instrument orders in detail:](#instrument-orders-in-detail) * [Strategy order handling for roll orders](#strategy-order-handling-for-roll-orders) - * [Strategy order handling for spread orders](#strategy-order-handling-for-spread-orders) * [Overrides](#overrides) * [Stack handler](#stack-handler) * [Instrument order netting (to be implemented)](#instrument-order-netting-to-be-implemented) @@ -75,10 +79,6 @@ Table of Contents * [Contract order creation - conditional orders](#contract-order-creation---conditional-orders) * [Contract order creation - passive roll status](#contract-order-creation---passive-roll-status) * [Instrument and contract order creation - active roll orders](#instrument-and-contract-order-creation---active-roll-orders) - * [Contract order creation: Intra market spread](#contract-order-creation-intra-market-spread) - * [Intra market spreads and rolls](#intra-market-spreads-and-rolls) - * [Contract order creation: Inter market spread](#contract-order-creation-inter-market-spread) - * [Inter market spreads and rolls](#inter-market-spreads-and-rolls) * [Manual trades](#manual-trades) * [Broker order creation and execution](#broker-order-creation-and-execution) * [Before an order is traded](#before-an-order-is-traded) @@ -110,6 +110,7 @@ Table of Contents * [Get spot FX data from interactive brokers, write to MongoDB (Daily)](#get-spot-fx-data-from-interactive-brokers-write-to-mongodb-daily) * [Update sampled contracts (Daily)](#update-sampled-contracts-daily) * [Update futures contract historical price data (Daily)](#update-futures-contract-historical-price-data-daily) + * [Set times when different regions download prices](#set-times-when-different-regions-download-prices) * [Update multiple and adjusted prices (Daily)](#update-multiple-and-adjusted-prices-daily) * [Update capital and p&l by polling brokerage account](#update-capital-and-pl-by-polling-brokerage-account) * [Allocate capital to strategies](#allocate-capital-to-strategies) @@ -135,8 +136,8 @@ Table of Contents * [View processes](#view-processes) * [Change status of process](#change-status-of-process) * [Global status change](#global-status-change) - * [Mark as close](#mark-as-finished) - * [Mark all dead processes as close](#mark-all-dead-processes-as-finished) + * [Mark as close](#mark-as-close) + * [Mark all dead processes as close](#mark-all-dead-processes-as-close) * [View process configuration](#view-process-configuration) * [Update configuration](#update-configuration) * [Interactive diagnostics](#interactive-diagnostics) @@ -148,8 +149,6 @@ Table of Contents * [Reports](#reports) * [Logs, errors, emails](#logs-errors-emails) * [View stored emails](#view-stored-emails) - * [View errors](#view-errors) - * [View logs](#view-logs) * [View prices](#view-prices) * [View capital](#view-capital) * [Positions and orders](#positions-and-orders) @@ -186,7 +185,7 @@ Table of Contents * [Backup state files](#backup-state-files) * [Backup mongo dump](#backup-mongo-dump) * [Start up script](#start-up-script) - * [Scripts under other (non\-linux) operating systems](#scripts-under-other-non-linux-operating-systems) + * [Scripts under other (non-linux) operating systems](#scripts-under-other-non-linux-operating-systems) * [Scheduling](#scheduling) * [Issues to consider when constructing the schedule](#issues-to-consider-when-constructing-the-schedule) * [Choice of scheduling systems](#choice-of-scheduling-systems) @@ -208,7 +207,8 @@ Table of Contents * [System backtest .yaml config file(s)](#system-backtest-yaml-config-files) * [Control config files](#control-config-files) * [Broker and data source specific configuration files](#broker-and-data-source-specific-configuration-files) - * [Only used when setting up the system](#only-used-when-setting-up-the-system) + * [Instrument and roll configuration](#instrument-and-roll-configuration) + * [Set up configuration](#set-up-configuration) * [Capital](#capital) * [Large changes in capital](#large-changes-in-capital) * [Withdrawals and deposits of cash or stock](#withdrawals-and-deposits-of-cash-or-stock) @@ -235,6 +235,7 @@ Table of Contents * [Risk report](#risk-report) * [Liquidity report](#liquidity-report) * [Costs report](#costs-report) + * [Customize report generation in the run_report process](#customize-report-generation-in-the-run_report-process) Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc) From aa3ab67b07a331f2c6e69de04391e52e8fd96b3f Mon Sep 17 00:00:00 2001 From: Rob Carver Date: Thu, 8 Feb 2024 13:45:59 +0000 Subject: [PATCH 229/235] setup not properly merged --- data/futures/roll_calendars_csv/BITCOIN#1.csv | 73 +++++++++++++++++++ data/futures/roll_calendars_csv/BITCOIN#2.csv | 73 +++++++++++++++++++ data/futures/roll_calendars_csv/BITCOIN#3.csv | 73 +++++++++++++++++++ data/futures/roll_calendars_csv/EDOLLAR#1.csv | 5 +- .../futures/roll_calendars_csv/ETHEREUM#1.csv | 35 +++++++++ .../futures/roll_calendars_csv/ETHEREUM#2.csv | 35 +++++++++ .../futures/roll_calendars_csv/ETHEREUM#3.csv | 35 +++++++++ setup.py | 5 -- sysexecution/stack_handler/roll_orders.py | 1 - 9 files changed, 328 insertions(+), 7 deletions(-) create mode 100644 data/futures/roll_calendars_csv/BITCOIN#1.csv create mode 100644 data/futures/roll_calendars_csv/BITCOIN#2.csv create mode 100644 data/futures/roll_calendars_csv/BITCOIN#3.csv create mode 100644 data/futures/roll_calendars_csv/ETHEREUM#1.csv create mode 100644 data/futures/roll_calendars_csv/ETHEREUM#2.csv create mode 100644 data/futures/roll_calendars_csv/ETHEREUM#3.csv diff --git a/data/futures/roll_calendars_csv/BITCOIN#1.csv b/data/futures/roll_calendars_csv/BITCOIN#1.csv new file mode 100644 index 0000000000..e1d43380be --- /dev/null +++ b/data/futures/roll_calendars_csv/BITCOIN#1.csv @@ -0,0 +1,73 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2017-12-18 15:00:00,20180100,20180200,20180200 +2018-01-18 00:00:00,20180200,20180300,20180300 +2018-02-15 02:00:00,20180300,20180400,20180400 +2018-03-19 01:00:00,20180400,20180500,20180500 +2018-04-17 08:00:00,20180500,20180600,20180600 +2018-05-18 07:00:00,20180600,20180700,20180700 +2018-06-25 07:00:00,20180700,20180800,20180800 +2018-07-18 04:00:00,20180800,20180900,20180900 +2018-08-19 23:00:00,20180900,20181000,20181000 +2018-09-17 12:00:00,20181000,20181100,20181100 +2018-10-18 04:00:00,20181100,20181200,20181200 +2018-11-16 09:00:00,20181200,20190100,20190100 +2018-12-18 08:00:00,20190100,20190200,20190200 +2019-01-18 18:00:00,20190200,20190300,20190300 +2019-02-15 10:00:00,20190300,20190400,20190400 +2019-03-18 10:00:00,20190400,20190500,20190500 +2019-04-17 03:00:00,20190500,20190600,20190600 +2019-05-17 00:00:00,20190600,20190700,20190700 +2019-06-17 00:00:00,20190700,20190800,20190800 +2019-07-18 00:00:00,20190800,20190900,20190900 +2019-08-19 15:00:00,20190900,20191000,20191000 +2019-09-17 05:00:00,20191000,20191100,20191100 +2019-10-18 00:00:00,20191100,20191200,20191200 +2019-11-18 14:00:00,20191200,20200100,20200100 +2019-12-18 14:00:00,20200100,20200200,20200200 +2020-01-17 00:00:00,20200200,20200300,20200300 +2020-02-17 00:00:00,20200300,20200400,20200400 +2020-03-18 00:00:00,20200400,20200500,20200500 +2020-04-17 00:00:00,20200500,20200600,20200600 +2020-05-18 00:00:00,20200600,20200700,20200700 +2020-06-17 12:00:00,20200700,20200800,20200800 +2020-07-17 01:00:00,20200800,20200900,20200900 +2020-08-18 00:00:00,20200900,20201000,20201000 +2020-09-17 00:00:00,20201000,20201100,20201100 +2020-10-19 00:00:00,20201100,20201200,20201200 +2020-11-17 00:00:00,20201200,20210100,20210100 +2020-12-18 01:00:00,20210100,20210200,20210200 +2021-01-18 00:00:00,20210200,20210300,20210300 +2021-02-15 00:00:00,20210300,20210400,20210400 +2021-03-18 00:00:00,20210400,20210500,20210500 +2021-04-16 00:00:00,20210500,20210600,20210600 +2021-05-18 01:00:00,20210600,20210700,20210700 +2021-06-17 02:00:00,20210700,20210800,20210800 +2021-07-19 08:00:00,20210800,20210900,20210900 +2021-08-18 01:00:00,20210900,20211000,20211000 +2021-09-17 02:00:00,20211000,20211100,20211100 +2021-10-18 01:00:00,20211100,20211200,20211200 +2021-11-17 04:00:00,20211200,20220100,20220100 +2021-12-17 01:00:00,20220100,20220200,20220200 +2022-01-18 01:00:00,20220200,20220300,20220300 +2022-02-15 01:00:00,20220300,20220400,20220400 +2022-03-18 01:00:00,20220400,20220500,20220500 +2022-04-18 01:00:00,20220500,20220600,20220600 +2022-05-18 03:00:00,20220600,20220700,20220700 +2022-06-17 02:00:00,20220700,20220800,20220800 +2022-07-18 01:00:00,20220800,20220900,20220900 +2022-08-18 01:00:00,20220900,20221000,20221000 +2022-09-16 14:00:00,20221000,20221100,20221100 +2022-10-18 01:00:00,20221100,20221200,20221200 +2022-11-17 23:00:00,20221200,20230100,20230100 +2022-12-19 16:00:00,20230100,20230200,20230200 +2023-01-18 14:30:00,20230200,20230300,20230300 +2023-02-15 15:00:00,20230300,20230400,20230400 +2023-03-17 13:30:00,20230400,20230500,20230500 +2023-04-17 14:30:00,20230500,20230600,20230600 +2023-05-18 15:00:00,20230600,20230700,20230700 +2023-06-16 20:00:00,20230700,20230800,20230800 +2023-07-18 14:30:00,20230800,20230900,20230900 +2023-08-18 14:30:00,20230900,20231000,20231000 +2023-09-18 18:00:00,20231000,20231100,20231100 +2023-10-18 14:30:00,20231100,20231200,20231200 +2023-11-17 14:30:00,20231200,20240100,20240100 diff --git a/data/futures/roll_calendars_csv/BITCOIN#2.csv b/data/futures/roll_calendars_csv/BITCOIN#2.csv new file mode 100644 index 0000000000..2c988d6e4b --- /dev/null +++ b/data/futures/roll_calendars_csv/BITCOIN#2.csv @@ -0,0 +1,73 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2017-12-18 15:00:00,20180200,20180300,20180300 +2018-01-18 00:00:00,20180300,20180400,20180400 +2018-02-15 02:00:00,20180400,20180500,20180500 +2018-03-19 01:00:00,20180500,20180600,20180600 +2018-04-17 08:00:00,20180600,20180700,20180700 +2018-05-18 07:00:00,20180700,20180800,20180800 +2018-06-25 07:00:00,20180800,20180900,20180900 +2018-07-18 04:00:00,20180900,20181000,20181000 +2018-08-19 23:00:00,20181000,20181100,20181100 +2018-09-17 12:00:00,20181100,20181200,20181200 +2018-10-18 04:00:00,20181200,20190100,20190100 +2018-11-16 09:00:00,20190100,20190200,20190200 +2018-12-18 08:00:00,20190200,20190300,20190300 +2019-01-18 18:00:00,20190300,20190400,20190400 +2019-02-15 10:00:00,20190400,20190500,20190500 +2019-03-18 10:00:00,20190500,20190600,20190600 +2019-04-17 03:00:00,20190600,20190700,20190700 +2019-05-17 00:00:00,20190700,20190800,20190800 +2019-06-17 00:00:00,20190800,20190900,20190900 +2019-07-18 00:00:00,20190900,20191000,20191000 +2019-08-19 15:00:00,20191000,20191100,20191100 +2019-09-17 05:00:00,20191100,20191200,20191200 +2019-10-18 00:00:00,20191200,20200100,20200100 +2019-11-18 14:00:00,20200100,20200200,20200200 +2019-12-18 14:00:00,20200200,20200300,20200300 +2020-01-17 00:00:00,20200300,20200400,20200400 +2020-02-17 00:00:00,20200400,20200500,20200500 +2020-03-18 00:00:00,20200500,20200600,20200600 +2020-04-17 00:00:00,20200600,20200700,20200700 +2020-05-18 00:00:00,20200700,20200800,20200800 +2020-06-17 12:00:00,20200800,20200900,20200900 +2020-07-17 01:00:00,20200900,20201000,20201000 +2020-08-18 00:00:00,20201000,20201100,20201100 +2020-09-17 00:00:00,20201100,20201200,20201200 +2020-10-19 00:00:00,20201200,20210100,20210100 +2020-11-17 00:00:00,20210100,20210200,20210200 +2020-12-18 01:00:00,20210200,20210300,20210300 +2021-01-18 00:00:00,20210300,20210400,20210400 +2021-02-15 00:00:00,20210400,20210500,20210500 +2021-03-18 00:00:00,20210500,20210600,20210600 +2021-04-16 00:00:00,20210600,20210700,20210700 +2021-05-18 01:00:00,20210700,20210800,20210800 +2021-06-17 02:00:00,20210800,20210900,20210900 +2021-07-19 08:00:00,20210900,20211000,20211000 +2021-08-18 01:00:00,20211000,20211100,20211100 +2021-09-17 02:00:00,20211100,20211200,20211200 +2021-10-18 01:00:00,20211200,20220100,20220100 +2021-11-17 04:00:00,20220100,20220200,20220200 +2021-12-17 01:00:00,20220200,20220300,20220300 +2022-01-18 01:00:00,20220300,20220400,20220400 +2022-02-15 01:00:00,20220400,20220500,20220500 +2022-03-18 01:00:00,20220500,20220600,20220600 +2022-04-18 01:00:00,20220600,20220700,20220700 +2022-05-18 03:00:00,20220700,20220800,20220800 +2022-06-17 02:00:00,20220800,20220900,20220900 +2022-07-18 01:00:00,20220900,20221000,20221000 +2022-08-18 01:00:00,20221000,20221100,20221100 +2022-09-16 14:00:00,20221100,20221200,20221200 +2022-10-18 01:00:00,20221200,20230100,20230100 +2022-11-17 23:00:00,20230100,20230200,20230200 +2022-12-19 16:00:00,20230200,20230300,20230300 +2023-01-18 14:30:00,20230300,20230400,20230400 +2023-02-15 15:00:00,20230400,20230500,20230500 +2023-03-17 13:30:00,20230500,20230600,20230600 +2023-04-17 14:30:00,20230600,20230700,20230700 +2023-05-18 15:00:00,20230700,20230800,20230800 +2023-06-16 20:00:00,20230800,20230900,20230900 +2023-07-18 14:30:00,20230900,20231000,20231000 +2023-08-18 14:30:00,20231000,20231100,20231100 +2023-09-18 18:00:00,20231100,20231200,20231200 +2023-10-18 14:30:00,20231200,20240100,20240100 +2023-11-17 14:30:00,20240100,20240200,20240200 diff --git a/data/futures/roll_calendars_csv/BITCOIN#3.csv b/data/futures/roll_calendars_csv/BITCOIN#3.csv new file mode 100644 index 0000000000..0a2363617e --- /dev/null +++ b/data/futures/roll_calendars_csv/BITCOIN#3.csv @@ -0,0 +1,73 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2017-12-18 15:00:00,20180300,20180400,20180400 +2018-01-18 00:00:00,20180400,20180500,20180500 +2018-03-14 12:00:00,20180500,20180600,20180600 +2018-03-19 01:00:00,20180600,20180700,20180700 +2018-04-17 08:00:00,20180700,20180800,20180800 +2018-05-18 07:00:00,20180800,20180900,20180900 +2018-06-25 07:00:00,20180900,20181000,20181000 +2018-07-18 04:00:00,20181000,20181100,20181100 +2018-08-19 23:00:00,20181100,20181200,20181200 +2018-09-17 23:00:00,20181200,20190100,20190100 +2018-10-18 04:00:00,20190100,20190200,20190200 +2018-11-16 09:00:00,20190200,20190300,20190300 +2018-12-18 08:00:00,20190300,20190400,20190400 +2019-01-18 18:00:00,20190400,20190500,20190500 +2019-02-15 10:00:00,20190500,20190600,20190600 +2019-03-18 10:00:00,20190600,20190700,20190700 +2019-04-17 03:00:00,20190700,20190800,20190800 +2019-05-17 00:00:00,20190800,20190900,20190900 +2019-06-17 00:00:00,20190900,20191000,20191000 +2019-07-18 00:00:00,20191000,20191100,20191100 +2019-08-19 15:00:00,20191100,20191200,20191200 +2019-09-17 05:00:00,20191200,20200100,20200100 +2019-10-18 00:00:00,20200100,20200200,20200200 +2019-11-18 14:00:00,20200200,20200300,20200300 +2019-12-18 14:00:00,20200300,20200400,20200400 +2020-01-17 00:00:00,20200400,20200500,20200500 +2020-02-17 00:00:00,20200500,20200600,20200600 +2020-03-18 00:00:00,20200600,20200700,20200700 +2020-04-17 00:00:00,20200700,20200800,20200800 +2020-05-18 00:00:00,20200800,20200900,20200900 +2020-06-17 12:00:00,20200900,20201000,20201000 +2020-07-17 01:00:00,20201000,20201100,20201100 +2020-08-18 00:00:00,20201100,20201200,20201200 +2020-09-17 00:00:00,20201200,20210100,20210100 +2020-10-19 00:00:00,20210100,20210200,20210200 +2020-11-17 00:00:00,20210200,20210300,20210300 +2020-12-18 01:00:00,20210300,20210400,20210400 +2021-01-18 00:00:00,20210400,20210500,20210500 +2021-02-15 00:00:00,20210500,20210600,20210600 +2021-03-18 00:00:00,20210600,20210700,20210700 +2021-04-16 00:00:00,20210700,20210800,20210800 +2021-05-18 01:00:00,20210800,20210900,20210900 +2021-06-17 02:00:00,20210900,20211000,20211000 +2021-07-19 08:00:00,20211000,20211100,20211100 +2021-08-18 01:00:00,20211100,20211200,20211200 +2021-09-17 02:00:00,20211200,20220100,20220100 +2021-10-18 01:00:00,20220100,20220200,20220200 +2021-11-17 04:00:00,20220200,20220300,20220300 +2021-12-17 01:00:00,20220300,20220400,20220400 +2022-01-18 01:00:00,20220400,20220500,20220500 +2022-02-15 01:00:00,20220500,20220600,20220600 +2022-03-18 01:00:00,20220600,20220700,20220700 +2022-04-18 01:00:00,20220700,20220800,20220800 +2022-05-18 03:00:00,20220800,20220900,20220900 +2022-06-17 02:00:00,20220900,20221000,20221000 +2022-07-18 01:00:00,20221000,20221100,20221100 +2022-08-18 01:00:00,20221100,20221200,20221200 +2022-09-16 14:00:00,20221200,20230100,20230100 +2022-10-18 01:00:00,20230100,20230200,20230200 +2022-11-17 23:00:00,20230200,20230300,20230300 +2022-12-19 16:00:00,20230300,20230400,20230400 +2023-01-18 14:30:00,20230400,20230500,20230500 +2023-02-15 15:00:00,20230500,20230600,20230600 +2023-03-17 13:30:00,20230600,20230700,20230700 +2023-04-17 14:30:00,20230700,20230800,20230800 +2023-05-18 15:00:00,20230800,20230900,20230900 +2023-06-16 20:00:00,20230900,20231000,20231000 +2023-07-18 14:30:00,20231000,20231100,20231100 +2023-08-18 14:30:00,20231100,20231200,20231200 +2023-09-18 18:00:00,20231200,20240100,20240100 +2023-10-18 14:30:00,20240100,20240200,20240200 +2023-11-17 14:30:00,20240200,20240300,20240300 diff --git a/data/futures/roll_calendars_csv/EDOLLAR#1.csv b/data/futures/roll_calendars_csv/EDOLLAR#1.csv index b4da84b288..a74fdcb6c2 100644 --- a/data/futures/roll_calendars_csv/EDOLLAR#1.csv +++ b/data/futures/roll_calendars_csv/EDOLLAR#1.csv @@ -120,4 +120,7 @@ DATE_TIME,current_contract,next_contract,carry_contract 2022-03-21 13:00:00,20240300,20240600,20231200 2022-06-21 14:00:00,20240600,20240900,20240300 2022-09-20 14:30:00,20240900,20241200,20240600 -2022-11-03 13:30:00,20241200,20250300,20240900 +2022-12-20 14:30:00,20241200,20250300,20240900 +2023-03-20 13:30:00,20250300,20250600,20241200 +2023-06-20 23:00:00,20250600,20250900,20250300 +2023-06-20 23:00:00,20250900,20251200,20250600 diff --git a/data/futures/roll_calendars_csv/ETHEREUM#1.csv b/data/futures/roll_calendars_csv/ETHEREUM#1.csv new file mode 100644 index 0000000000..f0db7bcfe4 --- /dev/null +++ b/data/futures/roll_calendars_csv/ETHEREUM#1.csv @@ -0,0 +1,35 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2021-02-07 23:00:00,20210200,20210300,20210300 +2021-02-08 19:00:00,20210300,20210400,20210400 +2021-03-08 01:00:00,20210400,20210500,20210500 +2021-04-06 01:00:00,20210500,20210600,20210600 +2021-05-07 12:00:00,20210600,20210700,20210700 +2021-06-07 02:00:00,20210700,20210800,20210800 +2021-07-07 02:00:00,20210800,20210900,20210900 +2021-08-06 01:00:00,20210900,20211000,20211000 +2021-09-06 03:00:00,20211000,20211100,20211100 +2021-10-07 01:00:00,20211100,20211200,20211200 +2021-11-05 13:00:00,20211200,20220100,20220100 +2021-12-07 12:00:00,20220100,20220200,20220200 +2022-01-07 03:00:00,20220200,20220300,20220300 +2022-02-04 03:00:00,20220300,20220400,20220400 +2022-03-07 08:00:00,20220400,20220500,20220500 +2022-04-06 01:00:00,20220500,20220600,20220600 +2022-05-06 09:00:00,20220600,20220700,20220700 +2022-06-06 23:00:00,20220700,20220800,20220800 +2022-07-07 14:00:00,20220800,20220900,20220900 +2022-08-07 23:00:00,20220900,20221000,20221000 +2022-09-06 12:00:00,20221000,20221100,20221100 +2022-10-07 13:00:00,20221100,20221200,20221200 +2022-11-07 15:00:00,20221200,20230100,20230100 +2022-12-07 16:00:00,20230100,20230200,20230200 +2023-01-06 20:00:00,20230200,20230300,20230300 +2023-02-03 23:00:00,20230300,20230400,20230400 +2023-03-07 23:00:00,20230400,20230500,20230500 +2023-04-06 14:30:00,20230500,20230600,20230600 +2023-05-08 23:00:00,20230600,20230700,20230700 +2023-06-06 23:00:00,20230700,20230800,20230800 +2023-07-07 23:00:00,20230800,20230900,20230900 +2023-08-07 23:00:00,20230900,20231000,20231000 +2023-09-06 15:00:00,20231000,20231100,20231100 +2023-10-06 23:00:00,20231100,20231200,20231200 diff --git a/data/futures/roll_calendars_csv/ETHEREUM#2.csv b/data/futures/roll_calendars_csv/ETHEREUM#2.csv new file mode 100644 index 0000000000..3becf30fb7 --- /dev/null +++ b/data/futures/roll_calendars_csv/ETHEREUM#2.csv @@ -0,0 +1,35 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2021-02-07 23:00:00,20210300,20210400,20210400 +2021-02-08 19:00:00,20210400,20210500,20210500 +2021-03-08 01:00:00,20210500,20210600,20210600 +2021-04-06 01:00:00,20210600,20210700,20210700 +2021-05-07 12:00:00,20210700,20210800,20210800 +2021-06-07 02:00:00,20210800,20210900,20210900 +2021-07-07 02:00:00,20210900,20211000,20211000 +2021-08-06 01:00:00,20211000,20211100,20211100 +2021-09-06 03:00:00,20211100,20211200,20211200 +2021-10-07 01:00:00,20211200,20220100,20220100 +2021-11-05 13:00:00,20220100,20220200,20220200 +2021-12-07 12:00:00,20220200,20220300,20220300 +2022-01-07 03:00:00,20220300,20220400,20220400 +2022-02-04 03:00:00,20220400,20220500,20220500 +2022-03-07 08:00:00,20220500,20220600,20220600 +2022-04-06 01:00:00,20220600,20220700,20220700 +2022-05-06 09:00:00,20220700,20220800,20220800 +2022-06-06 23:00:00,20220800,20220900,20220900 +2022-07-07 14:00:00,20220900,20221000,20221000 +2022-08-07 23:00:00,20221000,20221100,20221100 +2022-09-06 12:00:00,20221100,20221200,20221200 +2022-10-07 13:00:00,20221200,20230100,20230100 +2022-11-07 15:00:00,20230100,20230200,20230200 +2022-12-07 16:00:00,20230200,20230300,20230300 +2023-01-06 20:00:00,20230300,20230400,20230400 +2023-02-03 23:00:00,20230400,20230500,20230500 +2023-03-07 23:00:00,20230500,20230600,20230600 +2023-04-06 14:30:00,20230600,20230700,20230700 +2023-05-08 23:00:00,20230700,20230800,20230800 +2023-06-06 23:00:00,20230800,20230900,20230900 +2023-07-07 23:00:00,20230900,20231000,20231000 +2023-08-07 23:00:00,20231000,20231100,20231100 +2023-09-06 15:00:00,20231100,20231200,20231200 +2023-10-06 23:00:00,20231200,20240100,20240100 diff --git a/data/futures/roll_calendars_csv/ETHEREUM#3.csv b/data/futures/roll_calendars_csv/ETHEREUM#3.csv new file mode 100644 index 0000000000..139dc827fb --- /dev/null +++ b/data/futures/roll_calendars_csv/ETHEREUM#3.csv @@ -0,0 +1,35 @@ +DATE_TIME,current_contract,next_contract,carry_contract +2021-02-07 23:00:00,20210400,20210500,20210500 +2021-02-08 19:00:00,20210500,20210600,20210600 +2021-03-08 01:00:00,20210600,20210700,20210700 +2021-04-06 01:00:00,20210700,20210800,20210800 +2021-05-07 12:00:00,20210800,20210900,20210900 +2021-06-07 02:00:00,20210900,20211000,20211000 +2021-07-07 02:00:00,20211000,20211100,20211100 +2021-08-06 01:00:00,20211100,20211200,20211200 +2021-09-06 03:00:00,20211200,20220100,20220100 +2021-10-07 01:00:00,20220100,20220200,20220200 +2021-11-05 13:00:00,20220200,20220300,20220300 +2021-12-07 12:00:00,20220300,20220400,20220400 +2022-01-07 03:00:00,20220400,20220500,20220500 +2022-02-04 03:00:00,20220500,20220600,20220600 +2022-03-07 08:00:00,20220600,20220700,20220700 +2022-04-06 01:00:00,20220700,20220800,20220800 +2022-05-06 09:00:00,20220800,20220900,20220900 +2022-06-06 23:00:00,20220900,20221000,20221000 +2022-07-07 14:00:00,20221000,20221100,20221100 +2022-08-07 23:00:00,20221100,20221200,20221200 +2022-09-06 12:00:00,20221200,20230100,20230100 +2022-10-07 13:00:00,20230100,20230200,20230200 +2022-11-07 15:00:00,20230200,20230300,20230300 +2022-12-07 16:00:00,20230300,20230400,20230400 +2023-01-06 20:00:00,20230400,20230500,20230500 +2023-02-03 23:00:00,20230500,20230600,20230600 +2023-03-07 23:00:00,20230600,20230700,20230700 +2023-04-06 14:30:00,20230700,20230800,20230800 +2023-05-08 23:00:00,20230800,20230900,20230900 +2023-06-06 23:00:00,20230900,20231000,20231000 +2023-07-07 23:00:00,20231000,20231100,20231100 +2023-08-07 23:00:00,20231100,20231200,20231200 +2023-09-06 15:00:00,20231200,20240100,20240100 +2023-10-06 23:00:00,20240100,20240200,20240200 diff --git a/setup.py b/setup.py index 3dcfd64f4a..0820d53cbc 100755 --- a/setup.py +++ b/setup.py @@ -84,13 +84,8 @@ def dir_this_file(): package_data=package_data, long_description=read("README.md"), install_requires=[ -<<<<<<< HEAD "pandas==2.1.3", "matplotlib>=3.0.0", -======= - "pandas==1.0.5", - "matplotlib>=3.0.0,<3.8.0", ->>>>>>> bcfd9668056ff6748edd37538e079dbf22c657e4 "ib-insync==0.9.86", "PyYAML>=5.3", "numpy>=1.24.0", diff --git a/sysexecution/stack_handler/roll_orders.py b/sysexecution/stack_handler/roll_orders.py index 0b27dad56f..21e53b1a45 100644 --- a/sysexecution/stack_handler/roll_orders.py +++ b/sysexecution/stack_handler/roll_orders.py @@ -454,7 +454,6 @@ def create_instrument_roll_order_closing_priced_contract( return instrument_order - def get_strategy_name_with_largest_position_for_instrument( data: dataBlob, instrument_code: str ) -> str: From eee5391c21b3fadef42d4d35fddd834e8a4b5361 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 14 Feb 2024 11:18:01 +0000 Subject: [PATCH 230/235] fixes split frequency price file support - no longer using '/' character --- sysdata/csv/csv_futures_contract_prices.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sysdata/csv/csv_futures_contract_prices.py b/sysdata/csv/csv_futures_contract_prices.py index e1b9f8fcba..872b0db99f 100644 --- a/sysdata/csv/csv_futures_contract_prices.py +++ b/sysdata/csv/csv_futures_contract_prices.py @@ -223,7 +223,7 @@ def _keyname_given_contract_object_and_freq( if frequency is MIXED_FREQ: frequency_str = "" else: - frequency_str = frequency.name + "/" + frequency_str = frequency.name + "_" instrument_str = str(futures_contract_object.instrument) date_str = str(futures_contract_object.date_str) @@ -239,11 +239,11 @@ def _contract_tuple_and_freq_given_keyname(self, keyname: str) -> tuple: :param keyname: str :return: tuple instrument_code, contract_date """ - first_split_keyname_as_list = keyname.split("/") - if len(first_split_keyname_as_list) == 2: + if keyname.startswith("Day") or keyname.startswith("Hour"): ## has frequency - frequency = Frequency[first_split_keyname_as_list[0]] - residual_keyname = first_split_keyname_as_list[1] + index = keyname.find("_") + frequency = Frequency[keyname[:index]] + residual_keyname = keyname[index + 1 :] else: ## no frequency, mixed data frequency = MIXED_FREQ From af8741f5a57296faca7a4ad5fa6920b802467aa1 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Wed, 14 Feb 2024 11:19:04 +0000 Subject: [PATCH 231/235] adds support for import of split frequency price files --- .../contract_prices_from_csv_to_arctic.py | 40 +++++++++++++------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/sysinit/futures/contract_prices_from_csv_to_arctic.py b/sysinit/futures/contract_prices_from_csv_to_arctic.py index 4d489e8bae..1ab674a028 100644 --- a/sysinit/futures/contract_prices_from_csv_to_arctic.py +++ b/sysinit/futures/contract_prices_from_csv_to_arctic.py @@ -1,5 +1,5 @@ from syscore.constants import arg_not_supplied - +from syscore.dateutils import MIXED_FREQ, HOURLY_FREQ, DAILY_PRICE_FREQ from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData from sysproduction.data.prices import diagPrices from sysobjects.contracts import futuresContract @@ -8,7 +8,9 @@ def init_db_with_csv_futures_contract_prices( - datapath: str, csv_config=arg_not_supplied + datapath: str, + csv_config=arg_not_supplied, + frequency=MIXED_FREQ, ): csv_prices = csvFuturesContractPriceData(datapath) input( @@ -16,25 +18,32 @@ def init_db_with_csv_futures_contract_prices( % csv_prices.datapath ) - instrument_codes = csv_prices.get_list_of_instrument_codes_with_merged_price_data() + instrument_codes = ( + csv_prices.get_list_of_instrument_codes_with_price_data_at_frequency(frequency) + ) instrument_codes.sort() for instrument_code in instrument_codes: init_db_with_csv_futures_contract_prices_for_code( - instrument_code, datapath, csv_config=csv_config + instrument_code, datapath, csv_config=csv_config, frequency=frequency ) def init_db_with_csv_futures_contract_prices_for_code( - instrument_code: str, datapath: str, csv_config=arg_not_supplied + instrument_code: str, + datapath: str, + csv_config=arg_not_supplied, + frequency=MIXED_FREQ, ): print(instrument_code) csv_prices = csvFuturesContractPriceData(datapath, config=csv_config) db_prices = diag_prices.db_futures_contract_price_data - print("Getting .csv prices may take some time") - csv_price_dict = csv_prices.get_merged_prices_for_instrument(instrument_code) + print(f"Getting {frequency} .csv prices may take some time") + csv_price_dict = csv_prices.get_prices_at_frequency_for_instrument( + instrument_code, frequency + ) - print("Have .csv prices for the following contracts:") + print(f"Have {frequency} .csv prices for the following contracts:") print(str(csv_price_dict.keys())) for contract_date_str, prices_for_contract in csv_price_dict.items(): @@ -43,11 +52,16 @@ def init_db_with_csv_futures_contract_prices_for_code( contract = futuresContract(instrument_code, contract_date_str) print("Contract object is %s" % str(contract)) print("Writing to db") - db_prices.write_merged_prices_for_contract_object( - contract, prices_for_contract, ignore_duplication=True + db_prices.write_prices_at_frequency_for_contract_object( + contract, + prices_for_contract, + ignore_duplication=True, + frequency=frequency, + ) + print(f"Reading back {frequency} prices from db to check") + written_prices = db_prices.get_prices_at_frequency_for_contract_object( + contract, frequency=frequency ) - print("Reading back prices from db to check") - written_prices = db_prices.get_merged_prices_for_contract_object(contract) print("Read back prices are \n %s" % str(written_prices)) @@ -56,3 +70,5 @@ def init_db_with_csv_futures_contract_prices_for_code( # modify flags as required datapath = "*** NEED TO DEFINE A DATAPATH***" init_db_with_csv_futures_contract_prices(datapath) + # init_db_with_csv_futures_contract_prices(datapath, frequency=HOURLY_FREQ) + # init_db_with_csv_futures_contract_prices(datapath, frequency=DAILY_PRICE_FREQ) From 3a62d59f8a4390a2cf9aa8904bbe1079afb94fbc Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 15 Feb 2024 10:10:15 +0000 Subject: [PATCH 232/235] link to bc-utils usage guide --- docs/data.md | 68 +--------------------------------------------------- 1 file changed, 1 insertion(+), 67 deletions(-) diff --git a/docs/data.md b/docs/data.md index c3ac4d418d..0ae8a45866 100644 --- a/docs/data.md +++ b/docs/data.md @@ -178,73 +178,7 @@ Once we have the data we can also store it, in principle, anywhere but I will be By the way I can't just pull down this data myself and put it on github to save you time. Storing large amounts of data in github isn't a good idea regardless of whether it is in .csv or Mongo files, and there would also be licensing issues with me basically just copying and pasting raw data that belongs to someone else. You have to get, and then store, this stuff yourself. And of course at some point in a live system you would be updating this yourself. -An easy way to bulk download data from [Barchart](https://www.barchart.com) is to create a Premier account, which allows for up to 100 data downloads per day, and to use [bc-utils](https://github.com/bug-or-feature/bc-utils) by [Andy Geach](https://github.com/bug-or-feature). -We explain how to use it with pysystemtrade at the time of writing below, but we recommend that you read the bc-utils documentation in case these instructions become stale with updated versions of the tool. - -To set up bc-utils for use with pysystemtrade, you can use the following steps: -1. Clone the bc-utils repo to some directory of your choice. For concreteness, we will be using `~/bc-utils` here. - -2. Edit `~/bc-utils/bcutils/config.py` to contain the list of contracts you want to download data for. -For example, -```python -CONTRACT_MAP = { - "RICE": {"code": "ZR", "cycle": "FHKNUX", "tick_date": "2009-01-01"}, - "SOYOIL": {"code": "ZL", "cycle": "FHKNQUVZ", "tick_date": "2008-05-04"}, -} -``` -indicates that we are downloading data for the contracts ZR and ZL on Barchart and are matching them to the symbols RICE and SOYOIL, respectively, in pysystemtrade. -Further, we are downloading the months FHKNUX and FHKNQUVZ, respectively, with hourly data starting from 2009-01-01 and 2008-05-04, respectively, and daily data before those dates. - -3. Replace the last code block in `~/bc-utils/bcutils/bc_utils.py` (starting from line 420, at [the time of writing](https://github.com/bug-or-feature/bc-utils/commit/3b95acaa2bbae87af3aaef65dd4f50839986a7d4)) with - -```python -get_barchart_downloads( - create_bc_session(config=config), - contract_map=CONTRACT_MAP, - save_directory="BARCHART_DATA_DOWNLOAD_DIRECTORY", - start_year=1975, - end_year=2026, - dry_run=False) -``` -(Here, you can set `dry_run` to `True` if you would like to try this script without using any of your 100 daily downloads.) - -4. In `~/bc-utils/bcutils/bc_utils.py`, set your Barchart username (BARCHART_USERNAME), password (BARCHART_PASSWORD), and the desired data path (BARCHART_DATA_DOWNLOAD_DIRECTORY) for the Barchart data here: -```python -'barchart_username': 'BARCHART_USERNAME', -'barchart_password': 'BARCHART_PASSWORD' -``` - -5. If desired, add bc-utils to your crontab by adding a line like -``` -00 08 * * 1-7 . $HOME/.profile; cd ~/bc-utils ; python3 bcutils/bc_utils.py >> $ECHO_PATH/barchart_download.txt 2>&1 -``` -This can be helpful given the daily limit of 100 downloads. - -6. Once you have downloaded the data you want, you can add them to the mongo database by running the following python snippet (with your chosen BARCHART_DATA_DOWNLOAD_DIRECTORY) from the pysystemtrade directory: -```python -from sysdata.csv.csv_futures_contract_prices import ConfigCsvFuturesPrices -from sysinit.futures.contract_prices_from_csv_to_arctic import ( - init_arctic_with_csv_futures_contract_prices, -) - - -barchart_csv_config = ConfigCsvFuturesPrices(input_date_index_name="Time", - input_skiprows=0, - input_skipfooter=1, - input_date_format="%Y-%m-%d", - input_column_mapping=dict(OPEN="Open", HIGH="High", LOW="Low", FINAL="Close", VOLUME="Volume" - ), -) - - -def transfer_barchart_prices_to_arctic(datapath): - init_arctic_with_csv_futures_contract_prices( - datapath, csv_config=barchart_csv_config - ) - - -transfer_barchart_prices_to_arctic(BARCHART_DATA_DOWNLOAD_DIRECTORY) -``` +An easy way to bulk download data from [Barchart](https://www.barchart.com) is to create a Premier account, which allows for up to 250 data downloads per day, and to use [bc-utils](https://github.com/bug-or-feature/bc-utils). That project has a [guide for pysystemtrade users](https://github.com/bug-or-feature/bc-utils?tab=readme-ov-file#for-pysystemtrade-users). Alternatively, if you are very patient, you can manually download the data from the Barchart historical data pages, such as [this one for Cotton #2](https://www.barchart.com/futures/quotes/KG*0/historical-download). From 36109630df90f70677aeb6dbd2d4178f11f0416d Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 15 Feb 2024 16:32:30 +0000 Subject: [PATCH 233/235] allow csv_data_path to be passed to __init__() --- sysdata/sim/db_futures_sim_data.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sysdata/sim/db_futures_sim_data.py b/sysdata/sim/db_futures_sim_data.py index 8ebaf92835..848ea143fe 100644 --- a/sysdata/sim/db_futures_sim_data.py +++ b/sysdata/sim/db_futures_sim_data.py @@ -26,11 +26,15 @@ class dbFuturesSimData(genericBlobUsingFuturesSimData): def __init__( - self, data: dataBlob = arg_not_supplied, log=get_logger("dbFuturesSimData") + self, + data: dataBlob = arg_not_supplied, + csv_data_paths=arg_not_supplied, + log=get_logger("dbFuturesSimData"), ): if data is arg_not_supplied: data = dataBlob( log=log, + csv_data_paths=csv_data_paths, class_list=[ get_class_for_data_type(FUTURES_ADJUSTED_PRICE_DATA), get_class_for_data_type(FUTURES_MULTIPLE_PRICE_DATA), From 0322166d4637b34517297f6cd2fbc17ea43293af Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Tue, 20 Feb 2024 13:07:00 +0000 Subject: [PATCH 234/235] fix version so it matches CHANGELOG --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0820d53cbc..b259312316 100755 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ def dir_this_file(): setup( name="pysystemtrade", - version="1.61.0", + version="1.80", author="Robert Carver", description=( "Python framework for running systems as in Robert Carver's book Systematic Trading" From 374a6b5e1a36908dbb555d329e42cb1f01fabc51 Mon Sep 17 00:00:00 2001 From: Andy Geach Date: Thu, 22 Feb 2024 12:50:37 +0000 Subject: [PATCH 235/235] also generating merged prices when importing split frequency CSV prices --- .../contract_prices_from_csv_to_arctic.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/sysinit/futures/contract_prices_from_csv_to_arctic.py b/sysinit/futures/contract_prices_from_csv_to_arctic.py index 1ab674a028..3d46d695f1 100644 --- a/sysinit/futures/contract_prices_from_csv_to_arctic.py +++ b/sysinit/futures/contract_prices_from_csv_to_arctic.py @@ -1,5 +1,6 @@ from syscore.constants import arg_not_supplied from syscore.dateutils import MIXED_FREQ, HOURLY_FREQ, DAILY_PRICE_FREQ +from syscore.pandas.frequency import merge_data_with_different_freq from sysdata.csv.csv_futures_contract_prices import csvFuturesContractPriceData from sysproduction.data.prices import diagPrices from sysobjects.contracts import futuresContract @@ -64,6 +65,36 @@ def init_db_with_csv_futures_contract_prices_for_code( ) print("Read back prices are \n %s" % str(written_prices)) + # if we're importing hourly or daily, we need to also generate MIXED + if frequency != MIXED_FREQ: + create_merged_prices(contract) + + +def create_merged_prices(contract): + db_prices = diag_prices.db_futures_contract_price_data + if db_prices.has_price_data_for_contract_at_frequency( + contract, DAILY_PRICE_FREQ + ) and db_prices.has_price_data_for_contract_at_frequency(contract, HOURLY_FREQ): + print(f"DB has hourly and daily prices for {contract}, creating merged prices") + list_of_data = [ + diag_prices.get_prices_at_frequency_for_contract_object( + contract, + frequency=frequency, + ) + for frequency in [HOURLY_FREQ, DAILY_PRICE_FREQ] + ] + merged_prices = merge_data_with_different_freq(list_of_data) + print("Writing to db") + db_prices.write_prices_at_frequency_for_contract_object( + contract, merged_prices, frequency=MIXED_FREQ, ignore_duplication=True + ) + print("Reading back prices from db to check") + written_merged_prices = db_prices.get_prices_at_frequency_for_contract_object( + contract, frequency=MIXED_FREQ + ) + + print(f"Read back prices (MIXED) are \n{str(written_merged_prices)}") + if __name__ == "__main__": input("Will overwrite existing prices are you sure?! CTL-C to abort")