diff --git a/gs_quant/analytics/core/processor.py b/gs_quant/analytics/core/processor.py index bc28ab3c..0f91562d 100644 --- a/gs_quant/analytics/core/processor.py +++ b/gs_quant/analytics/core/processor.py @@ -14,6 +14,7 @@ under the License. """ import asyncio +import functools import logging import uuid from abc import ABCMeta, abstractmethod @@ -57,6 +58,13 @@ class DataQueryInfo: data: Series = None +@dataclass +class MeasureQueryInfo: + attr: str + processor: 'BaseProcessor' + entity: Entity + + DateOrDatetimeOrRDate = Union[DateOrDatetime, RelativeDate] @@ -70,6 +78,7 @@ def __init__(self, **kwargs): self.children_data: Dict[str, ProcessorResult] = {} self.data_cell = None self.last_value = kwargs.get('last_value', False) + self.measure_processor = kwargs.get('measure_processor', False) @abstractmethod def process(self, *args): @@ -86,7 +95,7 @@ def __handle_date_range(self, result, rdate_entity_map: Dict[str, date]): """ - Applies a date/datetime mask on the result using the start/end parameters on a processoor + Applies a date/datetime mask on the result using the start/end parameters on a processor :param result: :param rdate_entity_map: map of entity, rule, base_date to date value :return: None @@ -125,23 +134,32 @@ async def update(self, attribute: str, result: ProcessorResult, rdate_entity_map: Dict[str, date], - pool: ProcessPoolExecutor = None): + pool: ProcessPoolExecutor = None, + query_info: Union[DataQueryInfo, MeasureQueryInfo] = None): """ Handle the update of a single coordinate and recalculate the value :param attribute: Attribute aligning to data coordinate in the processor :param result: Processor result including success and series from data query """ - self.__handle_date_range(result, rdate_entity_map) + if not self.measure_processor: + self.__handle_date_range(result, rdate_entity_map) self.children_data[attribute] = result if isinstance(result, ProcessorResult): if result.success: try: if pool: - value = await asyncio.get_running_loop().run_in_executor(pool, self.process) + if self.measure_processor: + value = await asyncio.get_running_loop()\ + .run_in_executor(pool, functools.partial(self.process, query_info.entity)) + else: + value = await asyncio.get_running_loop().run_in_executor(pool, self.process) self.value = value else: - self.process() + if self.measure_processor: + self.process(query_info.entity) + else: + self.process() self.post_process() except Exception as e: self.value = ProcessorResult(False, @@ -168,7 +186,7 @@ def __add_required_rdates(self, entity: Entity, rdate_entity_map: Dict[str, Set[ def build_graph(self, entity: Entity, cell, - queries: List[DataQueryInfo], + queries: List[Union[DataQueryInfo, MeasureQueryInfo]], rdate_entity_map: Dict[str, Set[Tuple]], overrides: Optional[List]): """ Generates the nested cell graph and keeps a map of leaf data queries to processors""" attributes = self.__dict__ + if self.measure_processor: + queries.append(MeasureQueryInfo(attr='a', processor=self, entity=entity)) + for attr_name, child in self.children.items(): if isinstance(child, DataCoordinate): # Override coordinate dimensions @@ -212,14 +233,15 @@ async def calculate(self, attribute: str, result: ProcessorResult, rdate_entity_map: Dict[str, date], - pool: ProcessPoolExecutor = None): + pool: ProcessPoolExecutor = None, + query_info: Union[DataQueryInfo, MeasureQueryInfo] = None): """ Sets the result on the processor and recursively calls parent to set and calculate value :param attribute: Attribute aligning to data coordinate in the processor :param result: Processor result including success and series from data query """ # update the result - await self.update(attribute, result, rdate_entity_map, pool) + await self.update(attribute, result, rdate_entity_map, pool, query_info) # if there is a parent, traverse up and recompute if self.parent:
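A minimal sketch of the measure-processor contract this change introduces, using a hypothetical subclass (not part of the diff): a processor constructed with measure_processor=True is registered through a MeasureQueryInfo rather than a data query, and calculate()/update() hand query_info.entity straight to process().

import pandas as pd

from gs_quant.analytics.core import BaseProcessor
from gs_quant.analytics.core.processor_result import ProcessorResult
from gs_quant.entities.entity import Entity


class ExampleMeasureProcessor(BaseProcessor):  # hypothetical, for illustration only
    def __init__(self, **kwargs):
        super().__init__(**kwargs, measure_processor=True)  # opt in to the entity-based path

    def process(self, entity: Entity) -> ProcessorResult:
        # Unlike coordinate-based processors, no child data series is fetched;
        # the DataGrid row's entity itself is the input.
        self.value = ProcessorResult(True, pd.Series([1.0]))  # placeholder computation on the entity
        return self.value

    def get_plot_expression(self):
        pass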
diff --git a/gs_quant/analytics/core/query_helpers.py b/gs_quant/analytics/core/query_helpers.py index c8659ed7..647ca3e5 100644 --- a/gs_quant/analytics/core/query_helpers.py +++ b/gs_quant/analytics/core/query_helpers.py @@ -21,6 +21,7 @@ from pandas import DataFrame, to_datetime +from gs_quant.analytics.core.processor import MeasureQueryInfo from gs_quant.analytics.core.processor_result import ProcessorResult from gs_quant.data import DataFrequency from gs_quant.session import GsSession @@ -31,6 +32,8 @@ def aggregate_queries(query_infos): mappings = defaultdict(dict) # DataSet -> start/end for query_info in query_infos: + if isinstance(query_info, MeasureQueryInfo): + continue query = query_info.query coordinate = query.coordinate dataset_id = coordinate.dataset_id diff --git a/gs_quant/analytics/datagrid/data_cell.py b/gs_quant/analytics/datagrid/data_cell.py index 57203fff..736f5bbe 100644 --- a/gs_quant/analytics/datagrid/data_cell.py +++ b/gs_quant/analytics/datagrid/data_cell.py @@ -16,13 +16,13 @@ import copy import uuid -from typing import List, Optional, Dict, Set, Tuple +from typing import List, Optional, Dict, Set, Tuple, Union from pandas import Series from gs_quant.analytics.common import DATA_CELL_NOT_CALCULATED from gs_quant.analytics.core import BaseProcessor -from gs_quant.analytics.core.processor import DataQueryInfo +from gs_quant.analytics.core.processor import DataQueryInfo, MeasureQueryInfo from gs_quant.analytics.core.processor_result import ProcessorResult from gs_quant.analytics.datagrid import Override from gs_quant.analytics.datagrid.utils import get_utc_now @@ -61,7 +61,8 @@ def __init__(self, # Store the cell data queries self.data_queries: List[DataQueryInfo] = [] - def build_cell_graph(self, all_queries: List[DataQueryInfo], rdate_entity_map: Dict[str, Set[Tuple]]) -> None: + def build_cell_graph(self, all_queries: List[Union[DataQueryInfo, MeasureQueryInfo]], + rdate_entity_map: Dict[str, Set[Tuple]]) -> None: """ Generate and store the cell graph and data queries This can be modified to return the data queries rather than store on the cell diff --git a/gs_quant/analytics/datagrid/datagrid.py b/gs_quant/analytics/datagrid/datagrid.py index e904f5e7..a150bb28 100644 --- a/gs_quant/analytics/datagrid/datagrid.py +++ b/gs_quant/analytics/datagrid/datagrid.py @@ -29,7 +29,7 @@ from gs_quant.analytics.common import DATAGRID_HELP_MSG from gs_quant.analytics.common.helpers import resolve_entities, get_entity_rdate_key, get_entity_rdate_key_from_rdate, \ get_rdate_cache_key -from gs_quant.analytics.core.processor import DataQueryInfo +from gs_quant.analytics.core.processor import DataQueryInfo, MeasureQueryInfo from gs_quant.analytics.core.processor_result import ProcessorResult from gs_quant.analytics.core.query_helpers import aggregate_queries, fetch_query, build_query_string, valid_dimensions from gs_quant.analytics.datagrid.data_cell import DataCell @@ -125,7 +125,7 @@ def __init__(self, # store the graph, data queries to leaf processors and results self._primary_column_index: int = kwargs.get('primary_column_index', 0) self._cells: List[DataCell] = [] - self._data_queries: List[DataQueryInfo] = [] + 
self._data_queries: List[Union[DataQueryInfo, MeasureQueryInfo]] = [] self._entity_cells: List[DataCell] = [] self._coord_processor_cells: List[DataCell] = [] self._value_cells: List[DataCell] = [] @@ -152,7 +152,7 @@ def initialize(self) -> None: Upon providing data to a leaf, the leaf processor is calculated and propagated up the graph to the cell level. """ - all_queries: List[DataQueryInfo] = [] + all_queries: List[Union[DataQueryInfo, MeasureQueryInfo]] = [] entity_cells: List[DataCell] = [] current_row_group = None @@ -201,6 +201,8 @@ def initialize(self) -> None: cell.processor.children['a'].set_dimensions(data_overrides[-1].dimensions) self._coord_processor_cells.append(cell) + elif column_processor.measure_processor: + all_queries.append(MeasureQueryInfo(attr='', entity=entity, processor=column_processor)) else: # append the required queries to the map cell.build_cell_graph(all_queries, self.rdate_entity_map) @@ -342,7 +344,8 @@ def _resolve_queries(self, availability_cache: Dict = None) -> None: for query in self._data_queries: entity = query.entity - if isinstance(entity, str): # If we were unable to fetch entity (404/403) + if isinstance(entity, str) or isinstance(query, MeasureQueryInfo): + # If we were unable to fetch entity (404/403) or if we're processing a measure processor continue query = query.query coord = query.coordinate @@ -397,7 +400,13 @@ def _fetch_queries(self): query_info.data = Series() for query_info in self._data_queries: - if query_info.data is None or len(query_info.data) == 0: + if isinstance(query_info, MeasureQueryInfo): + asyncio.get_event_loop().run_until_complete( + query_info.processor.calculate(query_info.attr, + ProcessorResult(True, None), + self.rule_cache, + query_info=query_info)) + elif query_info.data is None or len(query_info.data) == 0: asyncio.get_event_loop().run_until_complete( query_info.processor.calculate(query_info.attr, ProcessorResult(False, diff --git a/gs_quant/analytics/processors/__init__.py b/gs_quant/analytics/processors/__init__.py index 001adf8c..cf386a15 100644 --- a/gs_quant/analytics/processors/__init__.py +++ b/gs_quant/analytics/processors/__init__.py @@ -16,9 +16,9 @@ from .analysis_processors import DiffProcessor from .econometrics_processors import SharpeRatioProcessor, VolatilityProcessor, CorrelationProcessor, ChangeProcessor, \ - ReturnsProcessor, BetaProcessor + ReturnsProcessor, BetaProcessor, FXImpliedCorrProcessor from .special_processors import EntityProcessor, CoordinateProcessor -from .statistics_processors import PercentilesProcessor, PercentileProcessor, \ +from .statistics_processors import PercentilesProcessor, PercentileProcessor, StdMoveProcessor, \ CovarianceProcessor, ZscoresProcessor, MeanProcessor, VarianceProcessor, SumProcessor, StdDevProcessor from .utility_processors import LastProcessor, AppendProcessor, AdditionProcessor, SubtractionProcessor, \ MultiplicationProcessor, DivisionProcessor, MinProcessor, MaxProcessor, NthLastProcessor diff --git a/gs_quant/analytics/processors/econometrics_processors.py b/gs_quant/analytics/processors/econometrics_processors.py index b6666f62..6cdab25f 100644 --- a/gs_quant/analytics/processors/econometrics_processors.py +++ b/gs_quant/analytics/processors/econometrics_processors.py @@ -21,12 +21,14 @@ from gs_quant.analytics.core.processor import BaseProcessor, DataCoordinateOrProcessor, DataQueryInfo, \ DateOrDatetimeOrRDate from gs_quant.analytics.core.processor_result import ProcessorResult +from gs_quant.analytics.processors.special_processors import 
MeasureProcessor from gs_quant.data.coordinate import DataCoordinate from gs_quant.data.query import DataQuery from gs_quant.entities.entity import Entity -from gs_quant.markets.securities import Stock +from gs_quant.markets.securities import Stock, Cross from gs_quant.target.common import Currency -from gs_quant.timeseries import correlation, Window, SeriesType, DataMeasure, DataFrequency +from gs_quant.timeseries import correlation, Window, SeriesType, DataMeasure, DataFrequency, DataContext, \ + fx_implied_correlation from gs_quant.timeseries import excess_returns_pure from gs_quant.timeseries.econometrics import get_ratio_pure, SharpeAssets, change, returns from gs_quant.timeseries.econometrics import volatility, Returns, beta @@ -349,3 +351,47 @@ def process(self): def get_plot_expression(self): pass + + +class FXImpliedCorrProcessor(MeasureProcessor): + + def __init__(self, + *, + cross2: Entity = None, + tenor: str = '3m', + start: Optional[DateOrDatetimeOrRDate] = None, + end: Optional[DateOrDatetimeOrRDate] = None, + **kwargs): + """ FXImpliedCorrProcessor + + :param cross2: second Cross, paired with the Cross entity this processor runs on, to compute + the implied correlation + :param tenor: tenor of the implied volatility data used in the calculation, e.g. '3m' (default) + :param start: start date or time used in the underlying data query + :param end: end date or time used in the underlying data query + """ + super().__init__(**kwargs) + self.cross2: Entity = cross2 + self.tenor: str = tenor + # date/datetime range for the underlying data query + self.start = start + self.end = end + + def process(self, cross1: Entity) -> ProcessorResult: + if isinstance(cross1, Cross) and isinstance(self.cross2, Cross): + try: + with DataContext(self.start, self.end): + result = fx_implied_correlation(cross1, self.cross2, self.tenor) + self.value = ProcessorResult(True, result) + except Exception as e: + self.value = ProcessorResult(False, str(e)) + else: + self.value = ProcessorResult(False, "Processor does not have valid crosses as inputs") + + return self.value + + def get_plot_expression(self): + pass
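A hedged usage sketch for the new FXImpliedCorrProcessor; the identifier lookups are placeholders, and the processor is driven directly here rather than through a DataGrid, which would pass the row's entity in the same way.

import datetime as dt

from gs_quant.analytics.processors import FXImpliedCorrProcessor
from gs_quant.markets.securities import SecurityMaster, AssetIdentifier

# Placeholder lookups: any two resolvable FX Cross assets will do
eurusd = SecurityMaster.get_asset('EURUSD', AssetIdentifier.BLOOMBERG_ID)
gbpusd = SecurityMaster.get_asset('GBPUSD', AssetIdentifier.BLOOMBERG_ID)

processor = FXImpliedCorrProcessor(cross2=gbpusd, tenor='3m',
                                   start=dt.date(2021, 1, 1), end=dt.date(2021, 7, 1))
result = processor.process(eurusd)  # a DataGrid invokes this with the row's entity
if result.success:
    print(result.data.tail())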
diff --git a/gs_quant/analytics/processors/special_processors.py b/gs_quant/analytics/processors/special_processors.py index 3809bf6a..546cbd37 100644 --- a/gs_quant/analytics/processors/special_processors.py +++ b/gs_quant/analytics/processors/special_processors.py @@ -95,3 +95,14 @@ def update(self, attribute: str, result: ProcessorResult): def get_plot_expression(self): pass + + +class MeasureProcessor(BaseProcessor): + def __init__(self, **kwargs): + super().__init__(**kwargs, measure_processor=True) + + def process(self, *args): + pass + + def get_plot_expression(self): + pass diff --git a/gs_quant/analytics/processors/statistics_processors.py b/gs_quant/analytics/processors/statistics_processors.py index 9ecfc10f..d756d604 100644 --- a/gs_quant/analytics/processors/statistics_processors.py +++ b/gs_quant/analytics/processors/statistics_processors.py @@ -20,6 +20,7 @@ from gs_quant.analytics.core.processor import BaseProcessor, DataCoordinateOrProcessor, DateOrDatetimeOrRDate from gs_quant.analytics.core.processor_result import ProcessorResult +from gs_quant.timeseries import returns from gs_quant.timeseries.statistics import percentiles, percentile, Window, mean, sum_, std, var, cov, zscores @@ -440,3 +441,53 @@ def process(self): def get_plot_expression(self): pass + + +class StdMoveProcessor(BaseProcessor): + def __init__(self, + a: DataCoordinateOrProcessor, + *, + start: Optional[DateOrDatetimeOrRDate] = None, + end: Optional[DateOrDatetimeOrRDate] = None, + w: Union[Window, int] = None, + **kwargs): + """ StdMoveProcessor: Latest return of series a normalized by the standard deviation of its returns + + :param a: DataCoordinate or BaseProcessor for the series + :param start: start date or time used in the underlying data query + :param end: end date or time used in the underlying data query + :param w: Window or int: size of window and ramp up to use. e.g. Window(22, 10) where 22 is the window size + and 10 the ramp up value. Window size defaults to length of series. + """ + super().__init__(**kwargs) + self.children['a'] = a + + self.start = start + self.end = end + self.w = w + + def process(self): + a_data = self.children_data.get('a') + if isinstance(a_data, ProcessorResult): + if a_data.success: + data_series = a_data.data + change_pd = data_series.tail(2) + change = returns(change_pd).iloc[-1] + + # Estimate the historical std dev from the returns of all values except the latest one + returns_series = returns(data_series.head(-1)) + std_result = std(returns_series, w=Window(None, 0) if self.w is None else self.w).iloc[-1] + + if change is not None and std_result != 0: + self.value = ProcessorResult(True, pd.Series([change / std_result])) + else: + self.value = ProcessorResult(False, "StdMoveProcessor returns a NaN") + else: + self.value = ProcessorResult(False, "StdMoveProcessor does not have 'a' series values yet") + else: + self.value = ProcessorResult(False, "StdMoveProcessor does not have 'a' series yet") + return self.value + + def get_plot_expression(self): + pass diff --git a/gs_quant/api/gs/data.py b/gs_quant/api/gs/data.py index 1ef0e0f2..9e0448e1 100644 --- a/gs_quant/api/gs/data.py +++ b/gs_quant/api/gs/data.py @@ -135,6 +135,8 @@ class QueryType(Enum): USD_OIS = "Usd Ois" NON_USD_OIS = "Non Usd Ois" SETTLEMENT_PRICE = "Settlement Price" + THEMATIC_EXPOSURE = "Thematic Exposure" + THEMATIC_BETA = "Thematic Beta" class GsDataApi(DataApi): diff --git a/gs_quant/api/gs/portfolios.py b/gs_quant/api/gs/portfolios.py index 21289b55..3089c9ad 100644 --- a/gs_quant/api/gs/portfolios.py +++ b/gs_quant/api/gs/portfolios.py @@ -15,7 +15,7 @@ """ import datetime as dt import logging -from typing import Tuple, Union, List +from typing import Tuple, Union, List, Dict from gs_quant.common import PositionType from gs_quant.instrument import Instrument @@ -215,3 +215,14 @@ def get_custom_aum(cls, if end_date: url += f"&endDate={end_date.strftime('%Y-%m-%d')}" return GsSession.current._get(url)['data'] + + @classmethod + def upload_custom_aum(cls, + portfolio_id: str, + aum_data: List[Dict], + clear_existing_data: bool = None) -> dict: + url = f'/portfolios/{portfolio_id}/aum' + payload = {'data': aum_data} + if clear_existing_data: + url += '?clearExistingData=true' + return GsSession.current._post(url, payload) diff --git a/gs_quant/api/gs/reports.py b/gs_quant/api/gs/reports.py index f043a80d..cb47ac5f 100644 --- a/gs_quant/api/gs/reports.py +++ b/gs_quant/api/gs/reports.py @@ -20,7 +20,7 @@ from gs_quant.session import GsSession from gs_quant.target.common import Currency -from gs_quant.target.reports import Report +from gs_quant.target.reports import Report, FactorRiskTableMode, OrderType _logger = logging.getLogger(__name__) @@ -64,11 +64,13 @@ def delete_report(cls, 
report_id: str) -> dict: return GsSession.current._delete('/reports/{id}'.format(id=report_id)) @classmethod - def schedule_report(cls, report_id: str, start_date: dt.date, end_date: dt.date) -> dict: + def schedule_report(cls, report_id: str, start_date: dt.date, end_date: dt.date, backcast: bool = False) -> dict: report_schedule_request = { 'startDate': start_date.strftime('%Y-%m-%d'), 'endDate': end_date.strftime('%Y-%m-%d') } + if backcast: + report_schedule_request['parameters'] = {'backcast': backcast} return GsSession.current._post('/reports/{id}/schedule'.format(id=report_id), report_schedule_request) @classmethod @@ -103,21 +105,46 @@ def get_risk_factor_data_results(cls, currency: Currency = None, start_date: dt.date = None, end_date: dt.date = None) -> dict: - url = '' + url = f'/risk/factors/reports/{risk_report_id}/results?' if factors is not None: factors = map(urllib.parse.quote, factors) # to support factors like "Automobiles & Components" - url += '&factors={factors}'.format(factors='&factors='.join(factors)) + url += f'&factors={"&factors=".join(factors)}' if factor_categories is not None: - url += '&factorCategories={categories}'.format(categories='&factorCategories='.join(factor_categories)) + url += f'&factorCategories={"&factorCategories=".join(factor_categories)}' if currency is not None: url += f'¤cy={currency.value}' if start_date is not None: - url += '&startDate={date}'.format(date=start_date.strftime('%Y-%m-%d')) + url += f'&startDate={start_date.strftime("%Y-%m-%d")}' if end_date is not None: - url += '&endDate={date}'.format(date=end_date.strftime('%Y-%m-%d')) + url += f'&endDate={end_date.strftime("%Y-%m-%d")}' + + return GsSession.current._get(url) + + @classmethod + def get_factor_risk_report_table(cls, + risk_report_id: str, + mode: FactorRiskTableMode = None, + factors: List[str] = None, + factor_categories: List[str] = None, + currency: Currency = None, + date: dt.date = None, + order_by_column: str = None, + order_type: OrderType = None) -> dict: + url = f'/risk/factors/reports/{risk_report_id}/tables?' + if mode is not None: + url += f'&mode={mode}' + if currency is not None: + url += f'¤cy={currency.value}' + if date is not None: + url += f'&date={date.strftime("%Y-%m-%d")}' + if factors is not None: + factors = map(urllib.parse.quote, factors) + url += f'&factor={"&factor=".join(factors)}' + if factor_categories is not None: + url += f'&factorCategory={"&factorCategory=".join(factor_categories)}' + if order_by_column is not None: + url += f'&orderByColumn={order_by_column}' + if order_type is not None: + url += f'&orderType={order_type}' - if url: - url = f'/risk/factors/reports/{risk_report_id}/results?' 
+ url[1:] - else: - url = f'/risk/factors/reports/{risk_report_id}/results' return GsSession.current._get(url) diff --git a/gs_quant/api/gs/risk_models.py b/gs_quant/api/gs/risk_models.py index 3e7490a6..f18c79b3 100644 --- a/gs_quant/api/gs/risk_models.py +++ b/gs_quant/api/gs/risk_models.py @@ -16,7 +16,7 @@ import datetime as dt import logging -from typing import Tuple, Dict, List +from typing import Tuple, Dict, List, Union from gs_quant.session import GsSession from gs_quant.target.risk_models import RiskModel, RiskModelCalendar, RiskModelFactor, Term, RiskModelData, \ @@ -142,7 +142,7 @@ def get_risk_model_factor_data(cls, if end_date is not None: url += f'&endDate={end_date.strftime("%Y-%m-%d")}' if identifiers is not None: - url += '&identifier={ids}'.format(ids='&identifier='.join(identifiers)) + url += '&identifiers={ids}'.format(ids='&identifiers='.join(identifiers)) if include_performance_curve: url += '&includePerformanceCurve=true' return GsSession.current._get(url)['results'] @@ -164,7 +164,7 @@ def get_risk_model_coverage(cls, @classmethod def upload_risk_model_data(cls, model_id: str, - model_data: RiskModelData, + model_data: Union[Dict, RiskModelData], partial_upload: bool = False, target_universe_size: float = None) -> str: url = f'/risk/models/data/{model_id}' diff --git a/gs_quant/backtests/backtest_objects.py b/gs_quant/backtests/backtest_objects.py index a0a68685..924d2343 100644 --- a/gs_quant/backtests/backtest_objects.py +++ b/gs_quant/backtests/backtest_objects.py @@ -114,6 +114,32 @@ def result_summary(self, allow_mismatch_risk_keys=True): cash = pd.Series(self._cash_dict, name='Cash') return pd.concat([summary, cash], axis=1, sort=True).fillna(0) + def trade_ledger(self): + # This is a ledger of each instrument: when it was entered and when it was closed out. The cash associated + # with the entry and exit is used in the open value, close value and PnL calc. 
If the PnL is None it + # means the instrument is still live and therefore will show up in the PV + ledger = {} + names = [] + for date, cash_list in self.cash_payments.items(): + for cash in cash_list: + if cash.trade.name in names: + if cash.cash_paid: + ledger[cash.trade.name]['Close'] = date + ledger[cash.trade.name]['Close Value'] += cash.cash_paid + open_value = ledger[cash.trade.name]['Open Value'] + ledger[cash.trade.name]['Trade PnL'] = ledger[cash.trade.name]['Close Value'] - open_value + ledger[cash.trade.name]['Status'] = 'closed' + else: + names.append(cash.trade.name) + ledger[cash.trade.name] = {'Open': date, + 'Close': None, + 'Open Value': cash.cash_paid, + 'Close Value': 0, + 'Long Short': cash.direction, + 'Status': 'open', + 'Trade PnL': None} + return pd.DataFrame(ledger).T.sort_index() + class ScalingPortfolio: def __init__(self, trade, dates, risk, csa_term=None, scaling_parameter='notional_amount'): @@ -131,6 +157,7 @@ def __init__(self, trade, effective_date=None, scale_date=None, direction=1): self.effective_date = effective_date self.scale_date = scale_date self.direction = direction + self.cash_paid = None class PredefinedAssetBacktest: diff --git a/gs_quant/backtests/generic_engine.py b/gs_quant/backtests/generic_engine.py index 82ff83c5..0abc24a0 100644 --- a/gs_quant/backtests/generic_engine.py +++ b/gs_quant/backtests/generic_engine.py @@ -269,14 +269,15 @@ def run_backtest(self, strategy, start=None, end=None, frequency='BM', states=No if d in backtest.cash_payments and d <= end: for cp in backtest.cash_payments[d]: value = cash_results.get(cp.effective_date, {}).get(Price, {}).get(cp.trade.name, {}) - value = value or backtest.results[cp.effective_date][Price][cp.trade.name] + value = backtest.results[cp.effective_date][Price][cp.trade.name] if value == {} else value if cp.scale_date: scale_notional = backtest.portfolio_dict[cp.scale_date][cp.trade.name].notional_amount scale_date_adj = scale_notional / cp.trade.notional_amount - backtest.cash_dict[d] += \ - value * scale_date_adj * cp.direction + cp.cash_paid = value * scale_date_adj * cp.direction + backtest.cash_dict[d] += cp.cash_paid else: - backtest.cash_dict[d] += value * cp.direction + cp.cash_paid = value * cp.direction + backtest.cash_dict[d] += cp.cash_paid current_value = backtest.cash_dict[d] if stored_pc: diff --git a/gs_quant/backtests/order.py b/gs_quant/backtests/order.py index 654db5c2..961ebd6a 100644 --- a/gs_quant/backtests/order.py +++ b/gs_quant/backtests/order.py @@ -154,7 +154,7 @@ def to_dict(self, data_hander: DataHandler) -> dict: return {'Instrument': self.instrument.currency, 'Type': self._short_name(), 'Price': self.execution_price(data_hander), - 'Quantity': self.execution_quantity(data_hander) + 'Quantity': self.execution_quantity() } diff --git a/gs_quant/backtests/predefined_asset_engine.py b/gs_quant/backtests/predefined_asset_engine.py index 4de31818..b7fb5151 100644 --- a/gs_quant/backtests/predefined_asset_engine.py +++ b/gs_quant/backtests/predefined_asset_engine.py @@ -132,7 +132,7 @@ def _timer(self, strategy, start, end, frequency): all_times = [] for d in dates: - if is_business_day(d.date(), self.calendars): + if self.calendars == 'Weekend' or is_business_day(d.date(), self.calendars): for t in times: all_times.append(dt.datetime.combine(d, t)) return all_times diff --git a/gs_quant/backtests/strategy_systematic.py b/gs_quant/backtests/strategy_systematic.py index 6a9eb823..9fa0d82e 100644 --- a/gs_quant/backtests/strategy_systematic.py +++ 
b/gs_quant/backtests/strategy_systematic.py @@ -13,16 +13,13 @@ specific language governing permissions and limitations under the License. """ -from dateutil.parser import isoparse import logging -import re from typing import Iterable import gs_quant.target.backtests as backtests from gs_quant.api.gs.backtests import GsBacktestApi from gs_quant.backtests.core import Backtest, TradeInMethod, MarketModel from gs_quant.errors import MqValueError -from gs_quant.markets import PricingContext from gs_quant.target.backtests import * from gs_quant.target.instrument import EqOption, EqVarianceSwap @@ -117,15 +114,6 @@ def __init__(self, def check_underlier_fields( underlier: Union[EqOption, EqVarianceSwap] ) -> Union[EqOption, EqVarianceSwap]: - # validation for different fields - if isinstance(underlier.expiration_date, datetime.date): - underlier = underlier.clone() - underlier.expiration_date = '{}d'.format( - (underlier.expiration_date - PricingContext.current.pricing_date).days) - elif re.search(ISO_FORMAT, underlier.expiration_date) is not None: - underlier = underlier.clone() - underlier.expiration_date = '{}d'.format( - (isoparse(underlier.expiration_date).date() - PricingContext.current.pricing_date).days) if isinstance(underlier, EqOption): underlier.number_of_options = None diff --git a/gs_quant/data/fields.py b/gs_quant/data/fields.py index e93b40e9..c83ef412 100644 --- a/gs_quant/data/fields.py +++ b/gs_quant/data/fields.py @@ -62,6 +62,11 @@ class DataMeasure(Enum): PRICE_TO_SALES = 'Price to Sales' RETURN_ON_EQUITY = 'Return on Equity' SALES_PER_SHARE = 'Sales per Share' + ONE_YEAR = '1y' + TWO_YEARS = '2y' + THREE_YEARS = '3y' + FORWARD = 'forward' + TRAILING = 'trailing' def __repr__(self): return self.value diff --git a/gs_quant/datetime/point.py b/gs_quant/datetime/point.py index 9606b37a..95112ab5 100644 --- a/gs_quant/datetime/point.py +++ b/gs_quant/datetime/point.py @@ -314,4 +314,6 @@ def point_sort_order(point: str, ref_date: dt.date = dt.date.today()) -> float: date_str = res.group(1) format_str = '%d%b%y' days = (dt.datetime.strptime(date_str, format_str).date() - ref_date).days + else: + days = 0 return days diff --git a/gs_quant/documentation/02_pricing_and_risk/01_scenarios_and_contexts/examples/01_rollfwd_shock/010102_rollfwd-showing-lifecycling-effects-on-swaps-and-swaptions.ipynb b/gs_quant/documentation/02_pricing_and_risk/01_scenarios_and_contexts/examples/01_rollfwd_shock/010102_rollfwd-showing-lifecycling-effects-on-swaps-and-swaptions.ipynb index 1ba90efd..1a6b35de 100644 --- a/gs_quant/documentation/02_pricing_and_risk/01_scenarios_and_contexts/examples/01_rollfwd_shock/010102_rollfwd-showing-lifecycling-effects-on-swaps-and-swaptions.ipynb +++ b/gs_quant/documentation/02_pricing_and_risk/01_scenarios_and_contexts/examples/01_rollfwd_shock/010102_rollfwd-showing-lifecycling-effects-on-swaps-and-swaptions.ipynb @@ -12,6 +12,7 @@ "from gs_quant.instrument import IRSwap, IRSwaption\n", "from gs_quant.markets import PricingContext\n", "import matplotlib.pyplot as plt\n", + "import gs_quant.risk as risk\n", "import pandas as pd\n", "import numpy as np" ] @@ -35,20 +36,24 @@ "# create a swap which has a 1m floating frequency\n", "swap = IRSwap('Pay', '10y', 'EUR', fixed_rate='ATM-5', floating_rate_frequency='1m', name='EUR10y')\n", "\n", - "# resolve the trade as of today to fix the dates and strike\n", + "# resolve the trade as of today to fix the dates and rate\n", "swap.resolve()\n", "\n", "# roll daily for 66 business days assuming both forward curve is realised 
and spot curve is realised\n", - "fwd_results = []\n", - "spot_results = []\n", + "fwd_price = []\n", + "fwd_cash = []\n", + "spot_price = []\n", + "spot_cash = []\n", "r = range(0, 66, 6)\n", "# by wrapping all the scenarios into one PricingContext we package all the requests into one call to GS\n", "with PricingContext():\n", " for bus_days in r:\n", " with PricingContext(is_async=True), RollFwd(date=f'{bus_days}b', holiday_calendar='LDN', realise_fwd=True):\n", - " fwd_results.append(swap.price())\n", + " fwd_price.append(swap.price())\n", + " fwd_cash.append(swap.calc(risk.Cashflows))\n", " with PricingContext(is_async=True), RollFwd(date=f'{bus_days}b', holiday_calendar='LDN', realise_fwd=False):\n", - " spot_results.append(swap.price())" + " spot_price.append(swap.price())\n", + " spot_cash.append(swap.calc(risk.Cashflows))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "pd.Series([r.result() for r in fwd_results], index=r, dtype=np.dtype(float)).plot(figsize=(10, 6), \n", - " title='Swap Carry', \n", - " label='{} fwd curve carry'.format(swap.name))\n", - "pd.Series([r.result() for r in spot_results], index=r, dtype=np.dtype(float)).plot(label='{} spot curve carry'.format(swap.name))\n", + "fwd_pv = pd.Series([p.result() for p in fwd_price], index=r)\n", + "spot_pv = pd.Series([p.result() for p in spot_price], index=r)\n", + "\n", + "# The output of the cashflows measure is a dataframe of the past and implied future cashflows. We could filter by payment date\n", + "# but conveniently the discount factor is 0 for paid cashflows\n", + "cash_fwd = pd.Series([c.result()[c.result().discount_factor == 0].payment_amount.sum() for c in fwd_cash], index=r)\n", + "cash_spot = pd.Series([c.result()[c.result().discount_factor == 0].payment_amount.sum() for c in spot_cash], index=r)\n", + "\n", + "fwd_pv.plot(figsize=(10, 6), title='Swap Carry', label='{} Realise Fwd'.format(swap.name))\n", + "spot_pv.plot(label='{} Realise Spot'.format(swap.name))\n", + "(fwd_pv+cash_fwd).plot(label='{} Realise Fwd (inc. cash)'.format(swap.name))\n", + "(spot_pv+cash_spot).plot(label='{} Realise Spot (inc. cash)'.format(swap.name))\n", "\n", "plt.xlabel('Business Days from Pricing Date')\n", "plt.ylabel('PV')\n", "plt.legend()\n", + "plt.show()\n", "# note that the steps represent the move in MTM as the cashflows are paid. 
The libor fixing is implied from the fwd" ] }, @@ -74,8 +88,8 @@ "metadata": {}, "outputs": [], "source": [ - "itm_swaption = IRSwaption('Receive', '10y', 'EUR', strike='ATM+50', expiration_date='1m', name='ITM swaption')\n", - "otm_swaption = IRSwaption('Receive', '10y', 'EUR', strike='ATM-50', expiration_date='1m', name='OTM swaption')\n", + "itm_swaption = IRSwaption('Receive', '10y', 'EUR', strike='ATM+20', expiration_date='1m', name='ITM swaption')\n", + "otm_swaption = IRSwaption('Receive', '10y', 'EUR', strike='ATM-20', expiration_date='1m', name='OTM swaption')\n", "port = Portfolio([itm_swaption, otm_swaption])\n", "port.resolve()\n", "\n", @@ -96,23 +110,10 @@ "metadata": {}, "outputs": [], "source": [ - "pd.Series([rs['ITM swaption'] for rs in fwd_results], index=r, dtype=np.dtype(float)).plot(figsize=(10, 6), \n", - " title='Swaption Carry', \n", - " label='{} carry'.format(itm_swaption.name))\n", - "pd.Series([rs['ITM swaption'] for rs in fwd_results], index=r, dtype=np.dtype(float)).plot(label='{} carry'.format(otm_swaption.name), secondary_y=True)\n", - "\n", - "plt.xlabel('Business Days from Pricing Date')\n", - "plt.ylabel('PV')\n", - "plt.legend()\n", + "df = pd.DataFrame.from_records([[p['ITM swaption'], p['OTM swaption']] for p in fwd_results], index=r, columns=['ITM', 'OTM'])\n", + "df.plot(figsize=(10,6), secondary_y=['OTM'], title='Swaption Carry', xlabel='Business Days', ylabel='PV')\n", "# note that the OTM swaption prices at 0 post expiry whereas the ITM swaption prices at the value of the swap." ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/gs_quant/documentation/03_portfolios/tutorials/Create New Portfolio.ipynb b/gs_quant/documentation/03_portfolios/tutorials/Create New Portfolio.ipynb index 65ab0eb3..9d1f6f7f 100644 --- a/gs_quant/documentation/03_portfolios/tutorials/Create New Portfolio.ipynb +++ b/gs_quant/documentation/03_portfolios/tutorials/Create New Portfolio.ipynb @@ -18,6 +18,7 @@ "To execute all the code in this tutorial, you will need the following application scopes:\n", "- **read_product_data**\n", "- **read_financial_data**\n", + "- **modify_product_data** (must be requested)\n", "- **modify_financial_data** (must be requested)\n", "- **run_analytics** (must be requested)\n", "- **read_user_profile**\n", @@ -44,31 +45,63 @@ }, "outputs": [], "source": [ - "import itertools\n", + "import datetime as dt\n", "\n", - "from gs_quant.api.gs.assets import GsAssetApi\n", - "from gs_quant.api.gs.portfolios import GsPortfolioApi\n", - "from gs_quant.common import PositionSet\n", "from gs_quant.entities.entitlements import Entitlements, EntitlementBlock, User\n", - "from gs_quant.markets.portfolio_manager import PortfolioManager\n", + "from gs_quant.markets.portfolio import Portfolio\n", + "from gs_quant.markets.portfolio_manager import PortfolioManager, CustomAUMDataPoint\n", + "from gs_quant.markets.position_set import Position, PositionSet\n", "from gs_quant.session import GsSession, Environment\n", - "from gs_quant.target.portfolios import Portfolio as MQPortfolio\n", - "from gs_quant.target.portfolios import Position\n", + "from gs_quant.target.portfolios import RiskAumSource\n", "\n", "client = 'ENTER CLIENT ID'\n", "secret = 'ENTER CLIENT SECRET'\n", "\n", - "\n", - "GsSession.use(Environment.PROD, client_id=client, client_secret=secret, scopes=('read_product_data read_financial_data modify_financial_data run_analytics read_user_profile',))\n", + 
"GsSession.use(\n", + " Environment.PROD,\n", + " client_id=client,\n", + " client_secret=secret,\n", + " scopes=('read_product_data read_financial_data modify_product_data modify_financial_data run_analytics read_user_profile',)\n", + ")\n", "\n", "print('GS Session initialized.')" ] }, + { + "cell_type": "markdown", + "source": [ + "## Step 2: Create the Portfolio\n", + "\n", + "The first step is to create a new, empty portfolio in Marquee." + ], + "metadata": { + "collapsed": false + } + }, + { + "cell_type": "code", + "execution_count": null, + "outputs": [], + "source": [ + "portfolio = Portfolio(name='My New Portfolio')\n", + "portfolio.save()\n", + "\n", + "print(f\"Created portfolio '{portfolio.name}' with ID: {portfolio.id}\")" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Step 2: Define Portfolio Entitlements\n", + "Once your portfolio has been saved to Marquee, the `PortfolioManager` class allows users to interact with their Marquee portfolios directly from GS Quant. We will be using `PortfolioManager` to update portfolio positions, entitlements, update custom AUM, and run reports.\n", + "\n", + "## Step 3: Define Portfolio Entitlements\n", "\n", "By default, an application will have all entitlement permissions to a portfolio it makes. However, if you would like to share the portfolio with others, either Marquee users or other applications, you will need to specify them in the entitlements parameter of the portfolio. Let's walk through how we convert a list of admin and viewer emails into an `Entitlements` object:" ] @@ -90,17 +123,24 @@ "admin_entitlements = EntitlementBlock(users=User.get_many(emails=portfolio_admin_emails))\n", "view_entitlements = EntitlementBlock(users=User.get_many(emails=portfolio_viewer_emails))\n", "\n", - "entitlements = Entitlements(view=view_entitlements,\n", - " admin=admin_entitlements)\n", + "entitlements = Entitlements(\n", + " view=view_entitlements,\n", + " admin=admin_entitlements\n", + ")\n", + "\n", + "print(f'Entitlements:\\n{entitlements.to_dict()}')\n", + "\n", + "pm = PortfolioManager(portfolio.id)\n", + "pm.set_entitlements(entitlements)\n", "\n", - "print(f'Entitlements:\\n{entitlements.to_dict()}')" + "print(f\"Updated entitlements for '{portfolio.name}'\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Step 3: Define the Positions You Would Like to Upload\n", + "## Step 4: Define Portfolio Positions\n", "\n", "Portfolio positions in Marquee are stored on a holding basis, when means you only upload positions for days where you are rebalancing your portfolio. 
Take the following set of positions:" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "pycharm": { "name": "#%%\n" } }, "outputs": [], "source": [ - "positions = [\n", - " { 'identifier': 'GS UN', 'quantity': 50, 'positionDate': '2020-05-01'},\n", - " { 'identifier': 'AAPL UW', 'quantity': 25, 'positionDate': '2020-05-01'},\n", - " { 'identifier': 'GS UN', 'quantity': 51, 'positionDate': '2020-07-01'},\n", - " { 'identifier': 'AAPL UW', 'quantity': 26, 'positionDate': '2020-07-01'}\n", + "portfolio_position_sets = [\n", + " PositionSet(\n", + " date=dt.date(day=3, month=5, year=2021),\n", + " positions=[\n", + " Position(identifier='AAPL UW', quantity=25),\n", + " Position(identifier='GS UN', quantity=50)\n", + " ]\n", + " ),\n", + " PositionSet(\n", + " date=dt.date(day=1, month=7, year=2021),\n", + " positions=[\n", + " Position(identifier='AAPL UW', quantity=26),\n", + " Position(identifier='GS UN', quantity=51)\n", + " ]\n", + " )\n", "]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "If these positions were to be uploaded correctly, this portfolio would hold 50 shares of GS UN and 25 shares of AAPL UW from May 1, 2020 to June 30, 2020, and it would hold 51 shares of GS UN and 26 shares of AAPL UW from July 1, 2020 to today.\n", - "\n", - "## Step 4: Format positions\n", - "\n", - "Now let's proceed with updating these positions to our portfolio. The first step is to resolve the identifiers provided into their corresponding unique Marquee identifiers. In this case, positions are identified by Bloomberg ID, but other identifiers can be used and resolved by adding them to the `fields` parameter in the function `GsAssetApi.resolve_assets`." + "If these positions were to be uploaded correctly, this portfolio would hold 50 shares of GS UN and 25 shares of AAPL UW from May 3, 2021 to June 30, 2021, and it would hold 51 shares of GS UN and 26 shares of AAPL UW from July 1, 2021 to today." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "pycharm": { "name": "#%%\n" } }, "outputs": [], "source": [ - "all_identifiers = list(set([p['identifier'] for p in positions]))\n", - "results = GsAssetApi.resolve_assets(identifier=all_identifiers, fields=['bbid', 'id'], limit=1)\n", - "try:\n", - " identifier_to_marquee_id = dict(zip(results.keys(), [a[0]['id'] for a in results.values()]))\n", - "except:\n", - " unmapped_assets = {k for k,v in results.items() if not v}\n", - " raise ValueError('Error in resolving the following identifiers: ' + ', '.join(unmapped_assets))\n", + "pm.update_positions(portfolio_position_sets)\n", "\n", - "print('Position identifiers successfully mapped as the following:')\n", - "for mq_id in identifier_to_marquee_id:\n", - " print(f'{mq_id} --> {identifier_to_marquee_id[mq_id]}')" + "print(f\"Updated positions for '{portfolio.name}'\")" ] }, { "cell_type": "markdown", "source": [ "## Step 5: Schedule Reports\n", "By default, creating a portfolio will automatically create a corresponding Performance Report for it as well. If you would also like to create a Factor Risk Report, follow the steps [here](../examples/marquee/00_create_factor_risk_report.ipynb). Then, remember to schedule all the portfolio reports."
+ ], + "metadata": { + "collapsed": false + } }, { "cell_type": "code", "execution_count": null, - "metadata": { - "pycharm": { - "name": "#%%\n" - } - }, "outputs": [], "source": [ - "portfolio_position_sets = []\n", - "\n", - "for position_date, positions_on_date in itertools.groupby(positions, lambda x: x['positionDate']):\n", - " formatted_positions = tuple(Position(asset_id=identifier_to_marquee_id[p['identifier']],\n", - " quantity=p['quantity']) for p in positions_on_date)\n", - " position_set = (PositionSet(position_date=position_date,\n", - " positions=formatted_positions))\n", - " portfolio_position_sets.append(position_set)\n", "\n", - "print('Portfolio positions successfully formatted.')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Step 5: Create the Portfolio\n", - "We're finally ready to create our portfolio and update it with our newly formatted positions." - ] - }, - { - "cell_type": "code", - "execution_count": null, "metadata": { "pycharm": { "name": "#%%\n" } }, - "outputs": [], - "source": [ - "# Format and create a new empty portfolio\n", - "portfolio = GsPortfolioApi.create_portfolio(MQPortfolio(name='ENTER PORTFOLIO NAME HERE',\n", - " currency='USD',\n", - " entitlements=entitlements.to_target()))\n", - "print(f\"Created portfolio '{portfolio.name}' with ID: {portfolio.id}\")\n", - "\n", - "GsPortfolioApi.update_positions(portfolio.id, portfolio_position_sets)\n", - "print(f\"Updated positions in '{portfolio.name}'\")\n" - ] + "pm.schedule_reports()\n", "\n", + "print('All portfolio reports scheduled.')" + ], "metadata": { + "collapsed": false, "pycharm": { "name": "#%%\n" } - }, - "outputs": [], - "source": [ - "pm = PortfolioManager(portfolio.id)\n", - "pm.schedule_reports()\n", - "\n", - "print('All portfolio reports scheduled.')" - ] + } }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Step 6: Schedule Reports\n", - "By default, creating a portfolio will automatically create a corresponding Performance Report for it as well. If you would like to create a Factor Risk Report for it as well, follow the steps [here](../examples/marquee/00_create_factor_risk_report.ipynb). Then, remember to schedule all the portfolio reports with our `PortfolioManager` class.\n", - "a" + "## Step 6: Update Custom AUM (Optional)\n", + "The `CustomAUMDataPoint` class is used to represent custom AUM data for a specific date. A list of them can be posted to Marquee using our initialized `PortfolioManager`. Unless you upload custom AUM data and change your portfolio's AUM Source to `Custom AUM`, the \"AUM\" (which is used for calculating risk as percent values) will default to your portfolio's long exposure." ] }, { "cell_type": "code", "execution_count": null, + "outputs": [], + "source": [ + "pm.set_aum_source(RiskAumSource.Custom_AUM)\n", + "custom_aum = [\n", + " CustomAUMDataPoint(date=dt.date(2021, 5, 1), aum=100000),\n", + " CustomAUMDataPoint(date=dt.date(2021, 7, 1), aum=200000)\n", + "]\n", + "pm.upload_custom_aum(custom_aum, clear_existing_data=False)\n", + "\n", + "print(f\"Custom AUM for '{portfolio.name}' successfully uploaded\")" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } }, { "cell_type": "markdown", - "metadata": {}, "source": [ "### You're all set, Congrats! What's next?\n", "\n", "* [Creating a Marquee Factor Risk Report for the portfolio](../examples/marquee/00_create_factor_risk_report.ipynb)\n", "\n", "* [Updating the portfolio with new positions](../tutorials/Update%20Historical%20Portfolio.ipynb)\n", "\n", "* [Retrieving the portfolio's performance analytics](../tutorials/Pull%20Portfolio%20Performance%20Data.ipynb)\n", "\n", "* [Retrieving the portfolio's factor risk and attribution analytics](../tutorials/Pull%20Portfolio%20Factor%20Risk%20Data.ipynb)\n", "\n", "\n", - "*Other questions? 
Reach out to the [Portfolio Analytics team](mailto:gs-marquee-analytics-support@gs.com)!*" - ] + "*Other questions? Reach out to the [Portfolio Analytics team](mailto:gs-marquee-analytics-support@gs.com)!*\n" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } } ], "metadata": { @@ -279,4 +295,4 @@ }, "nbformat": 4, "nbformat_minor": 1 -} +} \ No newline at end of file diff --git a/gs_quant/documentation/05_factor_models/tutorials/Upload_Factor_Models.ipynb b/gs_quant/documentation/05_factor_models/tutorials/Upload_Factor_Models.ipynb index 6667f2b1..42b7d15c 100644 --- a/gs_quant/documentation/05_factor_models/tutorials/Upload_Factor_Models.ipynb +++ b/gs_quant/documentation/05_factor_models/tutorials/Upload_Factor_Models.ipynb @@ -56,10 +56,11 @@ "outputs": [], "source": [ "from gs_quant.models.risk_model import FactorRiskModel, RiskModelCalendar\n", + "from gs_quant.entities.entitlements import Group, Entitlements, User, EntitlementBlock\n", "\n", "\n", "risk_model_id = 'MY_MODEL'\n", - "description = 'My custom Factor Risk Model'\n", + "description = 'My Custom Factor Risk Model'\n", "risk_model_name = 'My Model'\n", "term = 'Medium'\n", "vendor = 'Goldman Sachs'\n", @@ -67,14 +68,15 @@ "version = 1\n", "coverage = 'Country'\n", "\n", - "entitlements = {\n", - " \"execute\": [],\n", - " \"edit\": [],\n", - " \"view\": [],\n", - " \"admin\": [],\n", - " \"query\": [],\n", - " \"upload\": []\n", - "}" + "many_users = User.get_many(emails=[\"first_user@email.com\", \"second_user@email.com\", \"third_user@email.com\", \"fourth_user@email.com\"])\n", + "many_groups = Group.get_many(group_ids=[\"group_id_for_query\",\"group_id_for_execute\", \"group_id_for_view\"])\n", + "\n", + "entitlements = Entitlements(admin=EntitlementBlock(users=many_users[0]),\n", + " edit=EntitlementBlock(users=many_users[0]),\n", + " upload=EntitlementBlock(users=many_users[0]),\n", + " query=EntitlementBlock(users=many_users[1], groups=many_groups[0]),\n", + " execute=EntitlementBlock(groups=many_groups[1]),\n", + " view=EntitlementBlock(groups=many_groups[2], users=many_users[3]))" ], "metadata": { "collapsed": false, @@ -119,7 +121,9 @@ " vendor,\n", " version,\n", " entitlements,\n", - " description)\n" + " description)\n", + "\n", + "my_model.save()" ], "metadata": { "collapsed": false, diff --git a/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0002_get_basket_fundamentals_specific_period_and_direction.ipynb b/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0002_get_basket_fundamentals_specific_period_and_direction.ipynb index 6ba614eb..261bbac9 100644 --- a/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0002_get_basket_fundamentals_specific_period_and_direction.ipynb +++ b/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0002_get_basket_fundamentals_specific_period_and_direction.ipynb @@ -6,8 +6,8 @@ "metadata": {}, "outputs": [], "source": [ + "from gs_quant.data.fields import DataMeasure\n", "from gs_quant.markets.baskets import Basket\n", - "from gs_quant.markets.indices_utils import FundamentalMetricPeriod, FundamentalMetricPeriodDirection\n", "from gs_quant.session import Environment, GsSession" ] }, @@ -38,9 +38,9 @@ "source": [ "You may choose one of the following periods:\n", "\n", - "* **1 year:** FundamentalMetricPeriod.*ONE_YEAR*\n", - "* **2 years:** FundamentalMetricPeriod.*TWO_YEARS*\n", - "* **3 years:** FundamentalMetricPeriod.*THREE_YEARS*" + "* **1 year:** 
DataMeasure.*ONE_YEAR*\n", + "* **2 years:** DataMeasure.*TWO_YEARS*\n", + "* **3 years:** DataMeasure.*THREE_YEARS*" ] }, { @@ -49,8 +49,8 @@ "source": [ "You may choose one of the following period directions:\n", "\n", - "* **Forward:** FundamentalMetricPeriodDirection.*FORWARD*\n", - "* **Trailing:** FundamentalMetricPeriodDirection.*TRAILING*" + "* **Forward:** DataMeasure.*FORWARD*\n", + "* **Trailing:** DataMeasure.*TRAILING*" ] }, { @@ -59,7 +59,7 @@ "metadata": {}, "outputs": [], "source": [ - "basket.get_fundamentals(period=FundamentalMetricPeriod.TWO_YEARS, direction=FundamentalMetricPeriodDirection.TRAILING)" + "basket.get_fundamentals(period=DataMeasure.TWO_YEARS, direction=DataMeasure.TRAILING)" ] } ], @@ -84,4 +84,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0003_get_specific_basket_fundamentals_metrics.ipynb b/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0003_get_specific_basket_fundamentals_metrics.ipynb index 41266131..7b308afb 100644 --- a/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0003_get_specific_basket_fundamentals_metrics.ipynb +++ b/gs_quant/documentation/06_baskets/examples/05_basket_fundamentals_data/0003_get_specific_basket_fundamentals_metrics.ipynb @@ -6,8 +6,8 @@ "metadata": {}, "outputs": [], "source": [ + "from gs_quant.data.fields import DataMeasure\n", "from gs_quant.markets.baskets import Basket\n", - "from gs_quant.markets.indices_utils import FundamentalsMetrics\n", "from gs_quant.session import Environment, GsSession" ] }, @@ -38,17 +38,17 @@ "source": [ "You may choose any combinations of the following metrics:\n", "\n", - "* **Dividend Yield:** FundamentalsMetrics.*DIVIDEND_YIELD*\n", - "* **Earnings per Share:** FundamentalsMetrics.*EARNINGS_PER_SHARE*\n", - "* **Earnings per Share Positive:** FundamentalsMetrics.*EARNINGS_PER_SHARE_POSITIVE*\n", - "* **Net Debt to EBITDA:** FundamentalsMetrics.*NET_DEBT_TO_EBITDA*\n", - "* **Price to Book:** FundamentalsMetrics.*PRICE_TO_BOOK*\n", - "* **Price to Cash:** FundamentalsMetrics.*PRICE_TO_CASH*\n", - "* **Price to Earnings:** FundamentalsMetrics.*PRICE_TO_EARNINGS*\n", - "* **Price to Earnings Positive:** FundamentalsMetrics.*PRICE_TO_EARNINGS_POSITIVE*\n", - "* **Price to Sales:** FundamentalsMetrics.*PRICE_TO_SALES*\n", - "* **Return on Equity:** FundamentalsMetrics.*RETURN_ON_EQUITY*\n", - "* **Sales per Share:** FundamentalsMetrics.*SALES_PER_SHARE*" + "* **Dividend Yield:** DataMeasure.*DIVIDEND_YIELD*\n", + "* **Earnings per Share:** DataMeasure.*EARNINGS_PER_SHARE*\n", + "* **Earnings per Share Positive:** DataMeasure.*EARNINGS_PER_SHARE_POSITIVE*\n", + "* **Net Debt to EBITDA:** DataMeasure.*NET_DEBT_TO_EBITDA*\n", + "* **Price to Book:** DataMeasure.*PRICE_TO_BOOK*\n", + "* **Price to Cash:** DataMeasure.*PRICE_TO_CASH*\n", + "* **Price to Earnings:** DataMeasure.*PRICE_TO_EARNINGS*\n", + "* **Price to Earnings Positive:** DataMeasure.*PRICE_TO_EARNINGS_POSITIVE*\n", + "* **Price to Sales:** DataMeasure.*PRICE_TO_SALES*\n", + "* **Return on Equity:** DataMeasure.*RETURN_ON_EQUITY*\n", + "* **Sales per Share:** DataMeasure.*SALES_PER_SHARE*" ] }, { @@ -57,7 +57,7 @@ "metadata": {}, "outputs": [], "source": [ - "basket.get_fundamentals(metrics=[FundamentalsMetrics.PRICE_TO_CASH, FundamentalsMetrics.SALES_PER_SHARE])" + "basket.get_fundamentals(metrics=[DataMeasure.PRICE_TO_CASH, DataMeasure.SALES_PER_SHARE])" ] } ], @@ -82,4 
+82,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/gs_quant/entities/entitlements.py b/gs_quant/entities/entitlements.py index f9a16864..176c7f77 100644 --- a/gs_quant/entities/entitlements.py +++ b/gs_quant/entities/entitlements.py @@ -30,9 +30,9 @@ class User: def __init__(self, user_id: str, - name: str, - email: str, - company: str): + name: str = None, + email: str = None, + company: str = None): self.__id = user_id self.__email = email self.__name = name @@ -105,7 +105,7 @@ def get_many(cls, """ user_ids = user_ids if user_ids else [] names = names if names else [] - emails = emails if emails else [] + emails = [email.lower() for email in emails] if emails else [] companies = companies if companies else [] if not user_ids + names + emails + companies: diff --git a/gs_quant/entities/entity.py b/gs_quant/entities/entity.py index ad50d36e..b1c6b0d3 100644 --- a/gs_quant/entities/entity.py +++ b/gs_quant/entities/entity.py @@ -371,24 +371,11 @@ def get_position_sets(self, raise NotImplementedError def update_positions(self, - position_sets: List[PositionSet], - schedule_reports: bool = True): + position_sets: List[PositionSet]): if self.positioned_entity_type == EntityType.PORTFOLIO: if not position_sets: return - existing_positions_dates = self.get_position_dates() - new_position_dates = [p.date for p in position_sets] if position_sets else [] - reports = [r.latest_end_date for r in self.get_reports() if r.latest_end_date] - latest_date_covered_by_reports = min(reports) if reports else None - latest_position_date_in_reports = max([d for d in existing_positions_dates - if d <= latest_date_covered_by_reports]) \ - if latest_date_covered_by_reports else min(new_position_dates) - start_date = min(latest_position_date_in_reports, min(new_position_dates)) - end_date = max(new_position_dates) GsPortfolioApi.update_positions(portfolio_id=self.id, position_sets=[p.to_target() for p in position_sets]) - if schedule_reports: - self._schedule_reports(start_date=start_date, - end_date=end_date) else: raise NotImplementedError @@ -462,14 +449,6 @@ def get_factor_risk_report(self, return FactorRiskReport.from_target(reports[0]) raise NotImplementedError - def create_report(self, report: Report): - if self.positioned_entity_type == EntityType.PORTFOLIO: - report.set_position_source(self.id) - report.save() - self._schedule_first_reports([pos_set.date for pos_set in self.get_position_sets()]) - return report - raise NotImplementedError - def poll_report(self, report_id: str, timeout: int = 600, step: int = 30) -> ReportStatus: poll = True timeout = 1800 if timeout > 1800 else timeout diff --git a/gs_quant/json_encoder.py b/gs_quant/json_encoder.py index 4b80d6b9..5c9ff530 100644 --- a/gs_quant/json_encoder.py +++ b/gs_quant/json_encoder.py @@ -24,7 +24,12 @@ def default(o): if isinstance(o, datetime.datetime): - return o.strftime('%Y-%m-%dT%H:%M:%S.') + '{:06d}'.format(o.microsecond)[:-3] + 'Z' + try: + iso_formatted = o.isoformat(timespec='milliseconds') + except TypeError: + # Pandas Timestamp objects don't take timespec, will raise TypeError (as of 1.2.4) + iso_formatted = o.isoformat() + return iso_formatted if o.tzinfo else iso_formatted + 'Z' # Make sure to be explicit about timezone if isinstance(o, datetime.date): return o.isoformat() elif isinstance(o, Enum): diff --git a/gs_quant/markets/baskets.py b/gs_quant/markets/baskets.py index d0fbe9a1..b37c4e15 100644 --- a/gs_quant/markets/baskets.py +++ b/gs_quant/markets/baskets.py @@ -31,6 +31,7 @@ from 
gs_quant.api.gs.reports import GsReportApi from gs_quant.api.gs.users import GsUsersApi from gs_quant.common import DateLimit, PositionType +from gs_quant.data.fields import DataMeasure from gs_quant.entities.entity import EntityType, PositionedEntity from gs_quant.entities.entitlements import Entitlements as BasketEntitlements from gs_quant.errors import MqError, MqValueError @@ -449,9 +450,9 @@ def get_corporate_actions(self, def get_fundamentals(self, start: dt.date = DateLimit.LOW_LIMIT.value, end: dt.date = dt.date.today(), - period: FundamentalMetricPeriod = FundamentalMetricPeriod.ONE_YEAR.value, - direction: FundamentalMetricPeriodDirection = FundamentalMetricPeriodDirection.FORWARD.value, - metrics: List[FundamentalsMetrics] = FundamentalsMetrics.to_list()) -> pd.DataFrame: + period: DataMeasure = DataMeasure.ONE_YEAR.value, + direction: DataMeasure = DataMeasure.FORWARD.value, + metrics: List[DataMeasure] = DataMeasure.list_fundamentals()) -> pd.DataFrame: """ Retrieve fundamentals data for a basket across a date range @@ -470,11 +471,11 @@ def get_fundamentals(self, Retrieve historical dividend yield data for a basket + >>> from gs_quant.data.fields import DataMeasure >>> from gs_quant.markets.baskets import Basket - >>> from gs_quant.markets.indices_utils import FundamentalsMetrics >>> >>> basket = Basket.get("GSMBXXXX") - >>> basket.get_corporate_actions(metrics=[FundamentalsMetrics.DIVIDEND_YIELD]) + >>> basket.get_fundamentals(metrics=[DataMeasure.DIVIDEND_YIELD]) **See also** diff --git a/gs_quant/markets/index.py b/gs_quant/markets/index.py index ecd3071e..eb9d240c 100644 --- a/gs_quant/markets/index.py +++ b/gs_quant/markets/index.py @@ -103,17 +103,18 @@ def get(cls, identifier: str): def get_fundamentals(self, start: dt.date = DateLimit.LOW_LIMIT.value, end: dt.date = dt.date.today(), - period: FundamentalMetricPeriod = FundamentalMetricPeriod.ONE_YEAR.value, - direction: FundamentalMetricPeriodDirection = FundamentalMetricPeriodDirection.FORWARD.value, + period: DataMeasure = DataMeasure.ONE_YEAR.value, + direction: DataMeasure = DataMeasure.FORWARD.value, metrics: List[DataMeasure] = DataMeasure.list_fundamentals()) -> pd.DataFrame: """ Retrieve fundamentals data for an index across a date range. Currently supports STS indices only - :param start: start date (default minimum date value) + :param start: start date (default is 1 January, 1970) :param end: end date (default is today) - :param period: period for the relevant metric (default is 1y) - :param direction: direction of the outlook period (default is forward) - :param metrics: list of fundamentals metrics (default is all) + :param period: period for the relevant metric. Can be one of ONE_YEAR('1y'), TWO_YEARS('2y'), \ + THREE_YEARS('3y') (default is 1y) + :param direction: direction of the outlook period. Can be one of 'forward' or 'trailing' (default is forward) + :param metrics: list of fundamentals metrics. (default is all) :return: dataframe with fundamentals information **Usage** @@ -186,7 +187,7 @@ def get_close_price_for_date(self, """ Retrieve close prices for an index. Only STS indices support indicative prices. - :param date: date of the required prices (default today) + :param date: date of the required prices (default is today) :param price_type: Type of prices to return. Default returns official close price :return: dataframe with date's close prices @@ -230,7 +231,7 @@ def get_close_prices(self, """ Retrieve close prices for an index for a date range. Only STS indices support indicative prices. 
@@ -186,7 +187,7 @@ def get_close_price_for_date(self,
         """ Retrieve close prices for an index. Only STS indices support indicative prices.
 
-        :param date: date of the required prices (default today)
+        :param date: date of the required prices (default is today)
         :param price_type: Type of prices to return. Default returns official close price
         :return: dataframe with date's close prices
@@ -230,7 +231,7 @@ def get_close_prices(self,
         """ Retrieve close prices for an index for a date range. Only STS indices support indicative prices.
 
-        :param start: start date (default is minimum date value)
+        :param start: start date (default is 1 January, 1970)
         :param end: end date (default is today)
         :param price_type: Type of prices to return. Default returns official close price
         :return: dataframe with the close price between start and end date
diff --git a/gs_quant/markets/indices_utils.py b/gs_quant/markets/indices_utils.py
index e4858bf8..44540574 100644
--- a/gs_quant/markets/indices_utils.py
+++ b/gs_quant/markets/indices_utils.py
@@ -95,47 +95,6 @@ def __repr__(self):
         return self.value
 
-class FundamentalsMetrics(EnumBase, Enum):
-    """ Metric for the associated asset """
-    DIVIDEND_YIELD = 'Dividend Yield'
-    EARNINGS_PER_SHARE = 'Earnings per Share'
-    EARNINGS_PER_SHARE_POSITIVE = 'Earnings per Share Positive'
-    NET_DEBT_TO_EBITDA = 'Net Debt to EBITDA'
-    PRICE_TO_BOOK = 'Price to Book'
-    PRICE_TO_CASH = 'Price to Cash'
-    PRICE_TO_EARNINGS = 'Price to Earnings'
-    PRICE_TO_EARNINGS_POSITIVE = 'Price to Earnings Positive'
-    PRICE_TO_SALES = 'Price to Sales'
-    RETURN_ON_EQUITY = 'Return on Equity'
-    SALES_PER_SHARE = 'Sales per Share'
-
-    def __repr__(self):
-        return self.value
-
-    @classmethod
-    def to_list(cls):
-        return [metric.value for metric in cls]
-
-
-class FundamentalMetricPeriod(EnumBase, Enum):
-    """ Period for the relevant metric """
-    ONE_YEAR = '1y'
-    TWO_YEARS = '2y'
-    THREE_YEARS = '3y'
-
-    def __repr__(self):
-        return self.value
-
-
-class FundamentalMetricPeriodDirection(EnumBase, Enum):
-    """ Direction of the outlook period """
-    FORWARD = 'forward'
-    TRAILING = 'trailing'
-
-    def __repr__(self):
-        return self.value
-
-
 class IndicesDatasets(EnumBase, Enum):
     """ Indices Datasets """
     BASKET_FUNDAMENTALS = 'BASKET_FUNDAMENTALS'
diff --git a/gs_quant/markets/portfolio_manager.py b/gs_quant/markets/portfolio_manager.py
index 44c5a637..4a0ae60c 100644
--- a/gs_quant/markets/portfolio_manager.py
+++ b/gs_quant/markets/portfolio_manager.py
@@ -24,14 +24,45 @@
 from gs_quant.api.gs.portfolios import GsPortfolioApi
 from gs_quant.datetime import business_day_offset
 from gs_quant.entities.entitlements import Entitlements
+from gs_quant.entities.entity import PositionedEntity, EntityType
 from gs_quant.errors import MqValueError
-from gs_quant.markets.report import FactorRiskReport, PerformanceReport, ReportJobFuture, Report
-from gs_quant.target.reports import ReportType
+from gs_quant.markets.report import ReportJobFuture
+from gs_quant.target.portfolios import RiskAumSource
 
 _logger = logging.getLogger(__name__)
 
-class PortfolioManager:
+class CustomAUMDataPoint:
+    """
+
+    Custom AUM Data Point represents a portfolio's AUM value for a specific date
+
+    """
+
+    def __init__(self,
+                 date: dt.date,
+                 aum: float):
+        self.__date = date
+        self.__aum = aum
+
+    @property
+    def date(self) -> dt.date:
+        return self.__date
+
+    @date.setter
+    def date(self, value: dt.date):
+        self.__date = value
+
+    @property
+    def aum(self) -> float:
+        return self.__aum
+
+    @aum.setter
+    def aum(self, value: float):
+        self.__aum = value
+
+
+class PortfolioManager(PositionedEntity):
     """
     Portfolio Manager is used to manage Marquee portfolios (setting entitlements, running and retrieving reports, etc)
     """
@@ -45,6 +76,7 @@ def __init__(self,
         :param portfolio_id: Portfolio ID
         """
         self.__portfolio_id = portfolio_id
+        PositionedEntity.__init__(self, portfolio_id, EntityType.PORTFOLIO)
 
     @property
     def portfolio_id(self) -> str:
@@ -54,20 +86,6 @@ def portfolio_id(self) -> str:
     def portfolio_id(self, value: str):
         self.__portfolio_id = value
 
-    def get_reports(self) -> List[Report]:
-        """
-        Get a list of all reports associated with the portfolio
-        :return: list of Report objects
-        """
-        reports = []
-        reports_as_targets = GsPortfolioApi.get_reports(self.__portfolio_id)
-        for report_target in reports_as_targets:
-            if report_target.type in [ReportType.Portfolio_Factor_Risk, ReportType.Asset_Factor_Risk]:
-                reports.append(FactorRiskReport.from_target(report_target))
-            if report_target.type == ReportType.Portfolio_Performance_Analytics:
-                reports.append(PerformanceReport.from_target(report_target))
-        return reports
-
     def schedule_reports(self,
                          start_date: dt.date = None,
                          end_date: dt.date = None,
@@ -133,5 +151,50 @@ def get_schedule_dates(self,
                 end_date = min(position_dates)
             else:
                 start_date = min(position_dates)
-                end_date = max(position_dates)
+                end_date = business_day_offset(dt.date.today(), -1, roll='forward')
         return [start_date, end_date]
+
+    def get_aum_source(self) -> RiskAumSource:
+        """
+        Get portfolio AUM Source
+        :return: AUM Source
+        """
+        portfolio = GsPortfolioApi.get_portfolio(self.portfolio_id)
+        return portfolio.aum_source if portfolio.aum_source is not None else RiskAumSource.Long
+
+    def set_aum_source(self,
+                       aum_source: RiskAumSource):
+        """
+        Set portfolio AUM Source
+        :param aum_source: aum source for portfolio
+        :return:
+        """
+        portfolio = GsPortfolioApi.get_portfolio(self.portfolio_id)
+        portfolio.aum_source = aum_source
+        GsPortfolioApi.update_portfolio(portfolio)
+
+    def get_custom_aum(self,
+                       start_date: dt.date = None,
+                       end_date: dt.date = None) -> List[CustomAUMDataPoint]:
+        """
+        Get AUM data for portfolio
+        :param start_date: start date
+        :param end_date: end date
+        :return: list of AUM data between the specified range
+        """
+        aum_data = GsPortfolioApi.get_custom_aum(self.portfolio_id, start_date, end_date)
+        return [CustomAUMDataPoint(date=dt.datetime.strptime(data['date'], '%Y-%m-%d').date(),
+                                   aum=data['aum']) for data in aum_data]
+
+    def upload_custom_aum(self,
+                          aum_data: List[CustomAUMDataPoint],
+                          clear_existing_data: bool = None):
+        """
+        Add AUM data for portfolio
+        :param aum_data: list of AUM data to upload
+        :param clear_existing_data: delete all previously uploaded AUM data for the portfolio
+            (defaults to false)
+        :return:
+        """
+        formatted_aum_data = [{'date': data.date.strftime('%Y-%m-%d'), 'aum': data.aum} for data in aum_data]
+        GsPortfolioApi.upload_custom_aum(self.portfolio_id, formatted_aum_data, clear_existing_data)
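The new AUM methods above compose into a simple workflow. A hedged usage sketch: the portfolio id is a placeholder, an authenticated GsSession is assumed, and the RiskAumSource member for custom AUM is an assumption inferred from these APIs:

import datetime as dt
from gs_quant.markets.portfolio_manager import CustomAUMDataPoint, PortfolioManager
from gs_quant.target.portfolios import RiskAumSource

pm = PortfolioManager('MPXXXXXXXX')            # placeholder portfolio id
pm.set_aum_source(RiskAumSource.Custom_AUM)    # member name is an assumption
pm.upload_custom_aum([CustomAUMDataPoint(date=dt.date(2021, 6, 1), aum=1.5e6)],
                     clear_existing_data=False)
for point in pm.get_custom_aum(start_date=dt.date(2021, 6, 1)):
    print(point.date, point.aum)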
diff --git a/gs_quant/markets/report.py b/gs_quant/markets/report.py
index 9f16ea23..e95fa008 100644
--- a/gs_quant/markets/report.py
+++ b/gs_quant/markets/report.py
@@ -17,19 +17,21 @@
 from enum import Enum
 from time import sleep
 from typing import Tuple, Union, List, Dict
+
 import pandas as pd
+from dateutil.relativedelta import relativedelta
 
 from gs_quant.api.gs.data import GsDataApi
 from gs_quant.api.gs.portfolios import GsPortfolioApi
 from gs_quant.api.gs.reports import GsReportApi
+from gs_quant.datetime import business_day_offset
 from gs_quant.errors import MqValueError
-from gs_quant.models.risk_model import ReturnFormat
 from gs_quant.markets.report_utils import _get_ppaa_batches
-
+from gs_quant.models.risk_model import ReturnFormat
 from gs_quant.target.common import ReportParameters, Currency
 from gs_quant.target.coordinates import MDAPIDataBatchResponse
 from gs_quant.target.data import DataQuery, DataQueryResponse
-from gs_quant.target.reports import Report as TargetReport
+from gs_quant.target.reports import Report as TargetReport, FactorRiskTableMode, OrderType
 from gs_quant.target.reports import ReportType, PositionSourceType, ReportStatus
 
@@ -37,6 +39,8 @@ class ReportDataset(Enum):
     PPA_DATASET = "PPA"
     PFR_DATASET = "PFR"
     AFR_DATASET = "AFR"
+    PTA_DATASET = "PTA"
+    PTAA_DATASET = "PTAA"
     PORTFOLIO_CONSTITUENTS = "PORTFOLIO_CONSTITUENTS"
 
@@ -92,6 +96,7 @@ def __init__(self,
                  position_source_type: Union[str, PositionSourceType] = None,
                  report_type: Union[str, ReportType] = None,
                  parameters: ReportParameters = None,
+                 earliest_start_date: dt.date = None,
                  latest_end_date: dt.date = None,
                  latest_execution_time: dt.datetime = None,
                  status: Union[str, ReportStatus] = ReportStatus.new,
@@ -105,6 +110,7 @@ def __init__(self,
         self.__type = report_type if isinstance(report_type, ReportType) or report_type is None \
             else ReportType(report_type)
         self.__parameters = parameters
+        self.__earliest_start_date = earliest_start_date
         self.__latest_end_date = latest_end_date
         self.__latest_execution_time = latest_execution_time
         self.__status = status if isinstance(status, ReportStatus) else ReportStatus(status)
@@ -150,6 +156,10 @@ def parameters(self) -> ReportParameters:
     def parameters(self, value: ReportParameters):
         self.__parameters = value
 
+    @property
+    def earliest_start_date(self) -> dt.date:
+        return self.__earliest_start_date
+
     @property
     def latest_end_date(self) -> dt.date:
         return self.__latest_end_date
@@ -170,20 +180,7 @@ def percentage_complete(self) -> float:
     def get(cls,
             report_id: str,
             acceptable_types: List[ReportType] = None):
-        # This map cant be instantiated / stored at the top of this file, bc the Factor/RiskReport classes aren't
-        # defined there. Don't know the best place to put this
-        report_type_to_class_type = {
-            ReportType.Portfolio_Factor_Risk: type(FactorRiskReport()),
-            ReportType.Asset_Factor_Risk: type(FactorRiskReport()),
-            ReportType.Portfolio_Performance_Analytics: type(PerformanceReport())
-        }
-
-        report = GsReportApi.get_report(report_id=report_id)
-        if acceptable_types is not None and report.type not in acceptable_types:
-            raise MqValueError('Unexpected report type found.')
-        if report.type in report_type_to_class_type:
-            return report_type_to_class_type[report.type].from_target(report)
-        return Report.from_target(report)
+        return cls.from_target(GsReportApi.get_report(report_id))
 
     @classmethod
     def from_target(cls,
@@ -194,6 +191,7 @@ def from_target(cls,
                       position_source_type=report.position_source_type,
                       report_type=report.type,
                       parameters=report.parameters,
+                      earliest_start_date=report.earliest_start_date,
                       latest_end_date=report.latest_end_date,
                       latest_execution_time=report.latest_execution_time,
                       status=report.status,
@@ -237,7 +235,8 @@ def get_most_recent_job(self):
     def schedule(self,
                  start_date: dt.date = None,
-                 end_date: dt.date = None):
+                 end_date: dt.date = None,
+                 backcast: bool = None):
         if None in [self.id, self.__position_source_id]:
             raise MqValueError('Can only schedule reports with valid IDs and Position Source IDs.')
         if self.position_source_type != PositionSourceType.Portfolio and None in [start_date, end_date]:
@@ -247,28 +246,44 @@ def schedule(self,
             if len(position_dates) == 0:
                 raise MqValueError('Cannot schedule reports for a portfolio with no positions.')
             if start_date is None:
-                start_date = min(position_dates)
+                start_date = business_day_offset(min(position_dates) - relativedelta(years=1), -1, roll='forward') \
+                    if backcast else min(position_dates)
             if end_date is None:
-                end_date = max(position_dates)
+                end_date = min(position_dates) if backcast else max(position_dates)
         GsReportApi.schedule_report(report_id=self.id,
                                     start_date=start_date,
-                                    end_date=end_date)
+                                    end_date=end_date,
+                                    backcast=backcast)
 
     def run(self,
-            start_date: dt.date,
-            end_date: dt.date,
+            start_date: dt.date = None,
+            end_date: dt.date = None,
+            backcast: bool = None,
             is_async: bool = True):
-        self.schedule(start_date, end_date)
-        job_future = self.get_most_recent_job()
-        if is_async:
-            return job_future
-        counter = 100
-        while counter > 0:
-            if job_future.done():
-                return job_future.result()
-            sleep(6)
-        raise MqValueError(f'Your report {self.id} is taking longer than expected to finish. Please contact the '
-                           'Marquee Analytics team at gs-marquee-analytics-support@gs.com')
+        self.schedule(start_date, end_date, backcast)
+        counter = 5
+        while counter > 0:
+            try:
+                job_future = self.get_most_recent_job()
+                if is_async:
+                    return job_future
+                counter = 100
+                while counter > 0:
+                    if job_future.done():
+                        return job_future.result()
+                    counter -= 1
+                    sleep(6)
+                raise MqValueError(
+                    f'Your report {self.id} is taking longer than expected to finish. Please contact the '
+                    'Marquee Analytics team at gs-marquee-analytics-support@gs.com')
+            except IndexError:
+                counter -= 1
+        status = Report.get(self.id).status
+        if status == ReportStatus.waiting:
+            raise MqValueError(f'Your report {self.id} is stuck in "waiting" status and therefore cannot be run at '
+                               'this time.')
+        raise MqValueError(f'Your report {self.id} is taking longer to run than expected. '
+                           'Please reach out to the Marquee Analytics team at gs-marquee-analytics-support@gs.com '
+                           'for assistance.')
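The reworked run() above first schedules, then retries get_most_recent_job() up to five times before diagnosing a stuck report. A hedged usage sketch; the report id is a placeholder and an authenticated GsSession is assumed:

import datetime as dt
from gs_quant.markets.report import FactorRiskReport

report = FactorRiskReport.get('RPXXXXXXXX')  # placeholder report id

# Explicit window, asynchronous by default: returns a ReportJobFuture immediately.
job = report.run(start_date=dt.date(2021, 1, 4), end_date=dt.date(2021, 6, 1))

# Backcast: per schedule() above, the window is derived from position dates,
# starting roughly one year (business-day adjusted) before the earliest
# position date and ending at that date.
result = report.run(backcast=True, is_async=False)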
 
 
 class PerformanceReport(Report):
@@ -279,21 +294,21 @@ def __init__(self,
                  position_source_id: str = None,
                  position_source_type: Union[str, PositionSourceType] = None,
                  parameters: ReportParameters = None,
+                 earliest_start_date: dt.date = None,
                  latest_end_date: dt.date = None,
                  latest_execution_time: dt.datetime = None,
                  status: Union[str, ReportStatus] = ReportStatus.new,
                  percentage_complete: float = None,
                  **kwargs):
         super().__init__(report_id, name, position_source_id, position_source_type,
-                         ReportType.Portfolio_Performance_Analytics, parameters, latest_end_date, latest_execution_time,
-                         status, percentage_complete)
+                         ReportType.Portfolio_Performance_Analytics, parameters, earliest_start_date, latest_end_date,
+                         latest_execution_time, status, percentage_complete)
 
     @classmethod
     def get(cls,
             report_id: str,
             **kwargs):
-        return super(PerformanceReport, cls).get(report_id=report_id,
-                                                 acceptable_types=[ReportType.Portfolio_Performance_Analytics])
+        return cls.from_target(GsReportApi.get_report(report_id))
 
     @classmethod
     def from_target(cls,
@@ -306,6 +321,7 @@ def from_target(cls,
                                  position_source_type=report.position_source_type,
                                  report_type=report.type,
                                  parameters=report.parameters,
+                                 earliest_start_date=report.earliest_start_date,
                                  latest_end_date=report.latest_end_date,
                                  latest_execution_time=report.latest_execution_time,
                                  status=report.status,
@@ -429,6 +445,7 @@ def __init__(self,
                  position_source_id: str = None,
                  position_source_type: Union[str, PositionSourceType] = None,
                  report_type: Union[str, ReportType] = None,
+                 earliest_start_date: dt.date = None,
                  latest_end_date: dt.date = None,
                  latest_execution_time: dt.datetime = None,
                  status: Union[str, ReportStatus] = ReportStatus.new,
@@ -436,15 +453,14 @@ def __init__(self,
                  **kwargs):
         super().__init__(report_id, name, position_source_id, position_source_type, report_type,
                          ReportParameters(risk_model=risk_model_id,
-                                          fx_hedged=fx_hedged),
+                                          fx_hedged=fx_hedged), earliest_start_date,
                          latest_end_date, latest_execution_time, status,
                          percentage_complete)
 
     @classmethod
     def get(cls,
             report_id: str,
             **kwargs):
-        return super().get(report_id=report_id,
-                           acceptable_types=[ReportType.Portfolio_Factor_Risk, ReportType.Asset_Factor_Risk])
+        return cls.from_target(GsReportApi.get_report(report_id))
 
     @classmethod
     def from_target(cls,
@@ -457,6 +473,7 @@ def from_target(cls,
                                 position_source_id=report.position_source_id,
                                 position_source_type=report.position_source_type,
                                 report_type=report.type,
+                                earliest_start_date=report.earliest_start_date,
                                 latest_end_date=report.latest_end_date,
                                 status=report.status,
                                 percentage_complete=report.percentage_complete)
@@ -479,6 +496,36 @@ def get_results(self,
                                             end_date=end_date)
         return pd.DataFrame(results) if return_format == ReturnFormat.DATA_FRAME else results
 
+    def get_table(self,
+                  mode: FactorRiskTableMode = None,
+                  factors: List[str] = None,
+                  factor_categories: List[str] = None,
+                  date: dt.date = None,
+                  currency: Currency = None,
+                  order_by_column: str = None,
+                  order_type: OrderType = None,
+                  return_format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
+        table = GsReportApi.get_factor_risk_report_table(risk_report_id=self.id,
+                                                         mode=mode,
+                                                         factors=factors,
+                                                         factor_categories=factor_categories,
+                                                         currency=currency,
+                                                         date=date,
+                                                         order_by_column=order_by_column,
+                                                         order_type=order_type)
+        if return_format == ReturnFormat.DATA_FRAME:
+            column_info = table.get('table').get('metadata').get('columnInfo')
+            column_info[0].update({'columns': ['name', 'symbol', 'sector']})
+            rows = table.get('table').get('rows')
+            sorted_columns = []
+            for column_group in column_info:
+                sorted_columns = sorted_columns + column_group.get('columns')
+            rows_data_frame = pd.DataFrame(rows)
+            rows_data_frame = rows_data_frame[sorted_columns]
+            rows_data_frame = rows_data_frame.set_index('name')
+            return rows_data_frame
+        return table
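get_table() above fetches a pre-formatted factor risk table and, for the DataFrame path, reorders columns using the response's columnInfo metadata. A usage sketch; the id is a placeholder and the specific FactorRiskTableMode/OrderType member names are assumptions (the imports themselves come from the gs_quant.target.reports change above):

import datetime as dt
from gs_quant.markets.report import FactorRiskReport
from gs_quant.target.reports import FactorRiskTableMode, OrderType

risk_report = FactorRiskReport.get('RPXXXXXXXX')  # placeholder id
table = risk_report.get_table(mode=FactorRiskTableMode.Exposure,  # member name assumed
                              date=dt.date(2021, 6, 1),
                              order_by_column='name',
                              order_type=OrderType.Ascending)     # member name assumed
print(table.head())  # indexed by 'name', columns ordered per columnInfo metadata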
+
     def get_factor_pnl(self,
                        factor_name: str,
                        start_date: dt.date = None,
@@ -533,3 +580,83 @@ def get_daily_risk(self,
                                             end_date=end_date,
                                             currency=currency)
         return factor_data.filter(items=['date', 'dailyRisk'])
+
+
+class ThematicReport(Report):
+
+    def __init__(self,
+                 report_id: str = None,
+                 name: str = None,
+                 position_source_id: str = None,
+                 parameters: ReportParameters = None,
+                 earliest_start_date: dt.date = None,
+                 latest_end_date: dt.date = None,
+                 latest_execution_time: dt.datetime = None,
+                 status: Union[str, ReportStatus] = ReportStatus.new,
+                 percentage_complete: float = None,
+                 **kwargs):
+        super().__init__(report_id, name, position_source_id, PositionSourceType.Portfolio,
+                         ReportType.Portfolio_Thematic_Analytics, parameters, earliest_start_date, latest_end_date,
+                         latest_execution_time, status, percentage_complete)
+
+    @classmethod
+    def get(cls,
+            report_id: str,
+            **kwargs):
+        return cls.from_target(GsReportApi.get_report(report_id))
+
+    @classmethod
+    def from_target(cls,
+                    report: TargetReport):
+        if report.type != ReportType.Portfolio_Thematic_Analytics:
+            raise MqValueError('This report is not a portfolio thematic report.')
+        return ThematicReport(report_id=report.id,
+                              name=report.name,
+                              position_source_id=report.position_source_id,
+                              parameters=report.parameters,
+                              earliest_start_date=report.earliest_start_date,
+                              latest_end_date=report.latest_end_date,
+                              latest_execution_time=report.latest_execution_time,
+                              status=report.status,
+                              percentage_complete=report.percentage_complete)
+
+    def get_thematic_data(self,
+                          start_date: dt.date = None,
+                          end_date: dt.date = None,
+                          basket_ids: List[str] = None) -> pd.DataFrame:
+        pta_results = self._get_pta_measures(["thematicExposure", "grossExposure"], start_date, end_date, basket_ids,
+                                             ReturnFormat.JSON)
+        for result in pta_results:
+            result['thematicBeta'] = result['thematicExposure'] / result['grossExposure']
+        return pd.DataFrame(pta_results)
+
+    def get_thematic_exposure(self,
+                              start_date: dt.date = None,
+                              end_date: dt.date = None,
+                              basket_ids: List[str] = None) -> pd.DataFrame:
+        return self._get_pta_measures(["thematicExposure"], start_date, end_date, basket_ids)
+
+    def get_thematic_betas(self,
+                           start_date: dt.date = None,
+                           end_date: dt.date = None,
+                           basket_ids: List[str] = None) -> pd.DataFrame:
+        pta_results = self._get_pta_measures(["thematicExposure", "grossExposure"], start_date, end_date, basket_ids,
+                                             ReturnFormat.JSON)
+        for result in pta_results:
+            result['thematicBeta'] = result['thematicExposure'] / result['grossExposure']
+            result.pop('thematicExposure')
+            result.pop('grossExposure')
+        return pd.DataFrame(pta_results)
+
+    def _get_pta_measures(self,
+                          fields: List,
+                          start_date: dt.date = None,
+                          end_date: dt.date = None,
+                          basket_ids: List[str] = None,
+                          return_format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
+        where = {'reportId': self.id}
+        if basket_ids:
+            where['basketId'] = basket_ids
+        query = DataQuery(where=where, fields=fields, start_date=start_date, end_date=end_date)
+        results = GsDataApi.query_data(query=query, dataset_id=ReportDataset.PTA_DATASET.value)
+        return pd.DataFrame(results) if return_format == ReturnFormat.DATA_FRAME else results
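The thematic beta used by get_thematic_data and get_thematic_betas above is simply thematicExposure divided by grossExposure. A worked example on illustrative values:

pta_results = [
    {'date': '2021-06-01', 'thematicExposure': 250000.0, 'grossExposure': 1000000.0},
    {'date': '2021-06-02', 'thematicExposure': 240000.0, 'grossExposure': 800000.0},
]
for result in pta_results:
    result['thematicBeta'] = result['thematicExposure'] / result['grossExposure']

print([r['thematicBeta'] for r in pta_results])  # [0.25, 0.3]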
+""" +import logging +from datetime import date +from enum import Enum, unique +from typing import Union, Tuple + +from pandas import DataFrame +from pydash import set_, get + +from gs_quant.api.gs.screens import GsScreenApi +from gs_quant.errors import MqValueError +from gs_quant.target.assets_screener import AssetScreenerCreditRequestFilters, AssetScreenerRequest, \ + AssetScreenerRequestFilterLimits, AssetScreenerRequestStringOptions +from gs_quant.target.screens import Screen as TargetScreen, ScreenParameters as TargetScreenParameters +from gs_quant.common import Currency as CurrencyImport + +logging.root.setLevel('INFO') + + +class RangeFilter: + """ Respresents asset filters that are ranges """ + + def __init__(self, min_: Union[float, str] = None, max_: Union[float, str] = None): + self.__min = min_ + self.__max = max_ + + def __str__(self) -> str: + range_filter = f'{{Min: {self.min}, Max: {self.max}}}' + return range_filter + + @property + def min(self) -> Union[float, str]: + return self.__min + + @min.setter + def min(self, value: Union[float, str]): + self.__min = value + + @property + def max(self) -> Union[float, str]: + return self.__max + + @max.setter + def max(self, value: Union[float, str]): + self.__max = value + + +@unique +class CheckboxType(Enum): + INCLUDE = "Include" + EXCLUDE = "Exclude" + + +@unique +class Sector(Enum): + COMMUNICATION_SERVICES = "Communication Services" + CONSUMER_DISCRETIONARY = "Consumer Discretionary" + CONSUMER_STAPLES = "Consumer Staples" + ENERGY = "Energy" + FINANCIALS = "Financials" + HEALTH_CARE = "Health Care" + INDUSTRIALS = "Industrials" + INFORMATION_TECHNOLOGY = "Information Technology" + MATERIALS = "Materials" + REAL_ESTATE = "Real Estate" + UTILITIES = "Utilities" + + +@unique +class Seniority(Enum): + JUNIOR_SUBORDINATE = "Junior Subordinate" + SENIOR = "Senior" + SENIOR_SUBORDINATE = "Senior Subordinate" + SUBORDINATE = "Subordinate" + + +@unique +class Direction(Enum): + BUY = "Buy" + SELL = "Sell" + + +@unique +class Currency(CurrencyImport, Enum): + pass + + +class CheckboxFilter: + """ Represents asset filters that have multiple enumerated options""" + + def __init__(self, checkbox_type: CheckboxType = None, selections: Tuple[Enum] = None): + self.__selections = selections + self.__checkbox_type = checkbox_type + + def __str__(self) -> str: + checkbox_filter = f'{{Type: {self.checkbox_type}, Selections: {self.selections}}}' + return checkbox_filter + + @property + def checkbox_type(self) -> CheckboxType: + return self.__checkbox_type + + @checkbox_type.setter + def checkbox_type(self, value: CheckboxType): + self.__checkbox_type = value + + @property + def selections(self) -> Tuple[Enum]: + return self.__selections + + @selections.setter + def selections(self, value: Tuple[Enum]): + self.__selections = value + + def add(self, new_selections: Tuple[Enum]): + new_selections = set(new_selections) + old_selections = set(self.selections) + self.selections = tuple(set(new_selections).union(set(old_selections))) + + def remove(self, remove_selections: Tuple[Enum]): + remove_selections = set(remove_selections) + old_selections = set(self.selections) + self.selections = tuple(old_selections.difference(remove_selections)) + + +class ScreenFilters: + def __init__(self, + face_value: float = 1000000, + direction: str = "Buy", + liquidity_score: RangeFilter = RangeFilter(), + gs_charge_bps: RangeFilter = RangeFilter(), + gs_charge_dollars: RangeFilter = RangeFilter(), + duration: RangeFilter = RangeFilter(), + yield_: RangeFilter = 
+
+
+class ScreenFilters:
+    def __init__(self,
+                 face_value: float = 1000000,
+                 direction: str = "Buy",
+                 liquidity_score: RangeFilter = RangeFilter(),
+                 gs_charge_bps: RangeFilter = RangeFilter(),
+                 gs_charge_dollars: RangeFilter = RangeFilter(),
+                 duration: RangeFilter = RangeFilter(),
+                 yield_: RangeFilter = RangeFilter(),
+                 spread: RangeFilter = RangeFilter(),
+                 z_spread: RangeFilter = RangeFilter(),
+                 g_spread: RangeFilter = RangeFilter(),
+                 mid_price: RangeFilter = RangeFilter(),
+                 maturity: RangeFilter = RangeFilter(),
+                 amount_outstanding: RangeFilter = RangeFilter(),
+                 letter_rating: RangeFilter = RangeFilter(),
+                 seniority: CheckboxFilter = CheckboxFilter(),
+                 currency: CheckboxFilter = CheckboxFilter(),
+                 sector: CheckboxFilter = CheckboxFilter()):
+        self.__face_value = face_value
+        self.__direction = direction
+        self.__liquidity_score = liquidity_score
+        self.__gs_charge_bps = gs_charge_bps
+        self.__gs_charge_dollars = gs_charge_dollars
+        self.__duration = duration
+        self.__yield_ = yield_
+        self.__spread = spread
+        self.__z_spread = z_spread
+        self.__g_spread = g_spread
+        self.__mid_price = mid_price
+        self.__maturity = maturity
+        self.__amount_outstanding = amount_outstanding
+        self.__rating = letter_rating
+        self.__seniority = seniority
+        self.__currency = currency
+        self.__sector = sector
+
+    def __str__(self) -> str:
+        to_return = {}
+        filter_names = self.__dict__.keys()
+        for name in filter_names:
+            if self.__dict__[name]:
+                to_return[name] = self.__dict__[name].__str__()
+        return str(to_return)
+
+    @property
+    def face_value(self) -> float:
+        """Face value of the bond."""
+        return self.__face_value
+
+    @face_value.setter
+    def face_value(self, value: float):
+        self.__face_value = value
+
+    @property
+    def direction(self) -> str:
+        """Whether the position is a buy or sell."""
+        return self.__direction
+
+    @direction.setter
+    def direction(self, value: str):
+        self.__direction = value
+
+    @property
+    def liquidity_score(self) -> RangeFilter:
+        """Liquidity score assigned to buying/selling the bond."""
+        return self.__liquidity_score
+
+    @liquidity_score.setter
+    def liquidity_score(self, value: RangeFilter):
+        self.__validate_range_settings(min_=1, max_=6, value=value)
+        self.__liquidity_score = value
+
+    @property
+    def gs_charge_bps(self) -> RangeFilter:
+        """Goldman Sachs' indicative charge of the bond (bps)."""
+        return self.__gs_charge_bps
+
+    @gs_charge_bps.setter
+    def gs_charge_bps(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=10, value=value)
+        self.__gs_charge_bps = value
+
+    @property
+    def gs_charge_dollars(self) -> RangeFilter:
+        """Goldman Sachs' indicative charge of the bond (dollars)."""
+        return self.__gs_charge_dollars
+
+    @gs_charge_dollars.setter
+    def gs_charge_dollars(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=2, value=value)
+        self.__gs_charge_dollars = value
+
+    @property
+    def duration(self) -> RangeFilter:
+        """Measure of a bond's price sensitivity to changes in interest rates."""
+        return self.__duration
+
+    @duration.setter
+    def duration(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=20, value=value)
+        self.__duration = value
+
+    @property
+    def yield_(self) -> RangeFilter:
+        """Return an investor realizes on a bond sold at the mid price."""
+        return self.__yield_
+
+    @yield_.setter
+    def yield_(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=10, value=value)
+        self.__yield_ = value
+
+    @property
+    def spread(self) -> RangeFilter:
+        """Spread between the yields of a debt security and its benchmark when both are
+        purchased at bid price."""
+        return self.__spread
+
+    @spread.setter
+    def spread(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=1000, value=value)
+        self.__spread = value
+
+    @property
+    def z_spread(self) -> RangeFilter:
+        """Zero volatility spread of a bond."""
+        return self.__z_spread
+
+    @z_spread.setter
+    def z_spread(self, value: RangeFilter):
+        self.__z_spread = value
+
+    @property
+    def g_spread(self) -> RangeFilter:
+        """Difference between yield on treasury bonds and yield on corporate bonds of same
+        maturity."""
+        return self.__g_spread
+
+    @g_spread.setter
+    def g_spread(self, value: RangeFilter):
+        self.__g_spread = value
+
+    @property
+    def mid_price(self) -> RangeFilter:
+        """Mid price."""
+        return self.__mid_price
+
+    @mid_price.setter
+    def mid_price(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=200, value=value)
+        self.__mid_price = value
+
+    @property
+    def maturity(self) -> RangeFilter:
+        """Length of time bond owner will receive interest payments on the investment."""
+        return self.__maturity
+
+    @maturity.setter
+    def maturity(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=40, value=value)
+        self.__maturity = value
+
+    @property
+    def amount_outstanding(self) -> RangeFilter:
+        """Aggregate principal amount of the total number of bonds not redeemed or
+        otherwise discharged."""
+        return self.__amount_outstanding
+
+    @amount_outstanding.setter
+    def amount_outstanding(self, value: RangeFilter):
+        self.__validate_range_settings(min_=0, max_=1000000000, value=value)
+        self.__amount_outstanding = value
+
+    @property
+    def rating(self) -> RangeFilter:
+        """S&P rating given to a bond."""
+        return self.__rating
+
+    @rating.setter
+    def rating(self, value: RangeFilter):
+        self.__rating = value
+
+    @property
+    def seniority(self) -> CheckboxFilter:
+        """Seniority of the bond."""
+        return self.__seniority
+
+    @seniority.setter
+    def seniority(self, value: CheckboxFilter):
+        self.__seniority = value
+
+    @property
+    def currency(self) -> CheckboxFilter:
+        """Currency of the bond."""
+        return self.__currency
+
+    @currency.setter
+    def currency(self, value: CheckboxFilter):
+        self.__currency = value
+
+    @property
+    def sector(self) -> CheckboxFilter:
+        """Sector / industry of the bond."""
+        return self.__sector
+
+    @sector.setter
+    def sector(self, value: CheckboxFilter):
+        self.__sector = value
+
+    @staticmethod
+    def __validate_range_settings(min_: int, max_: int, value: RangeFilter):
+        if value.min is None and value.max is None:
+            return
+        if value.min < min_ or value.max > max_:
+            raise MqValueError(f'Please ensure your min and max values are in the range of {min_} <= x <= {max_}')
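Each bounded setter above routes through __validate_range_settings, so out-of-range filters are rejected at assignment time. A short sketch with illustrative values:

from gs_quant.errors import MqValueError
from gs_quant.markets.screens import RangeFilter, ScreenFilters

filters = ScreenFilters()
filters.duration = RangeFilter(min_=2, max_=10)  # accepted: within the 0 <= x <= 20 bounds
try:
    filters.liquidity_score = RangeFilter(min_=0, max_=7)  # rejected: bounds are 1 <= x <= 6
except MqValueError as e:
    print(e)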
+
+
+class Screen:
+    """ A named asset screen: a set of filters that can be saved, applied, and deleted via GsScreenApi """
+
+    def __init__(self, filters: ScreenFilters = None, screen_id: str = None, name: str = None):
+        if not filters:
+            self.__filters = ScreenFilters()
+        else:
+            self.__filters = filters
+
+        self.__id = screen_id
+        self.__name = name if name is not None else f"Screen {date.today().strftime('%d-%b-%Y')}"
+
+    @property
+    def id(self) -> str:
+        return self.__id
+
+    @property
+    def name(self) -> str:
+        return self.__name
+
+    @name.setter
+    def name(self, name: str):
+        self.__name = name
+
+    @property
+    def filters(self) -> ScreenFilters:
+        return self.__filters
+
+    @filters.setter
+    def filters(self, filters: ScreenFilters):
+        self.__filters = filters
+
+    @classmethod
+    def get(cls, screen_id: str):
+        screen = GsScreenApi.get_screen(screen_id=screen_id)
+        return Screen.__from_target(screen)
+
+    def calculate(self, format_: str = None):
+        """ Applies screen filters, returning assets that satisfy the condition(s) """
+        filters = self.__to_target_filters()
+        payload = AssetScreenerRequest(filters=filters)
+        assets = GsScreenApi.calculate(payload)
+        dataframe = DataFrame(assets)
+        if format_ == 'json':
+            return dataframe['results'].to_json(indent=4)
+        if format_ == 'csv':
+            return dataframe.to_csv()
+        return dataframe
+
+    def save(self):
+        """ Create a screen using GsScreenApi if it doesn't exist. Update the screen if it does. """
+        parameters = self.__to_target_parameters()
+        target_screen = TargetScreen(name=self.name, parameters=parameters)
+        if self.id:
+            target_screen.id = self.id
+            GsScreenApi.update_screen(target_screen)
+        else:
+            screen = GsScreenApi.create_screen(target_screen)
+            self.__id = screen.id
+            logging.info(f'New screen created with ID: {self.id} \n')
+
+    def delete(self):
+        """ Hits GsScreenApi to delete a screen """
+        GsScreenApi.delete_screen(self.id)
+
+    @classmethod
+    def __from_target(cls, screen):
+        return Screen(filters=screen.parameters, screen_id=screen.id, name=screen.name)
+
+    def __to_target_filters(self) -> AssetScreenerCreditRequestFilters:
+        payload = {}
+        filters = self.__set_up_filters()
+
+        for name in filters:
+            if name == 'face_value' or name == 'direction':
+                payload[name] = filters[name]
+            elif isinstance(filters[name], RangeFilter):
+                payload[name] = AssetScreenerRequestFilterLimits(min_=filters[name].min, max_=filters[name].max)
+            elif isinstance(filters[name], CheckboxFilter):
+                if filters[name].selections and filters[name].checkbox_type:
+                    payload[name] = AssetScreenerRequestStringOptions(options=filters[name].selections,
+                                                                      type_=filters[name].checkbox_type)
+        return AssetScreenerCreditRequestFilters(**payload)
+
+    def __set_up_filters(self) -> dict:
+        filters = {}
+        for prop in AssetScreenerCreditRequestFilters.properties():
+            set_(filters, prop, get(self.__filters, prop))
+        return filters
+
+    def __to_target_parameters(self) -> TargetScreenParameters:
+        payload = {}
+        parameters = self.__set_up_parameters()
+
+        for name in parameters:
+            if name == 'face_value' or name == 'direction':
+                payload[name] = parameters[name]
+            elif isinstance(parameters[name], RangeFilter):
+                payload[name] = AssetScreenerRequestFilterLimits(min_=parameters[name].min, max_=parameters[name].max)
+            elif isinstance(parameters[name], CheckboxFilter):
+                if parameters[name].selections and parameters[name].checkbox_type:
+                    payload[name] = parameters[name].selections
+        return TargetScreenParameters(**payload)
+
+    def __set_up_parameters(self) -> dict:
+        filter_to_parameter = {'face_value': 'face_value', 'direction': 'direction',
+                               'gs_liquidity_score': 'liquidity_score', 'gs_charge_bps': 'gs_charge_bps',
+                               'gs_charge_dollars': 'gs_charge_dollars', 'modified_duration': 'duration',
+                               'yield_to_convention': 'yield_', 'spread_to_benchmark': 'spread', 'z_spread': 'z_spread',
+                               'g_spread': 'g_spread', 'bval_mid_price': 'mid_price', 'maturity': 'maturity',
+                               'amount_outstanding': 'amount_outstanding', 'rating_standard_and_poors': 'rating',
+                               'seniority': 'seniority', 'currency': 'currency', 'sector': 'sector',
+                               'issue_date': 'issue_date'}
+
+        parameters = {}
+        for prop in TargetScreenParameters.properties():
+            set_(parameters, prop, get(self.__filters, filter_to_parameter[prop]))
+        return parameters
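An end-to-end sketch of the Screen lifecycle above: build filters, preview the matching assets, persist, and clean up. The name and filter values are illustrative and an authenticated GsSession is assumed:

from gs_quant.markets.screens import RangeFilter, Screen, ScreenFilters

filters = ScreenFilters()
filters.gs_charge_bps = RangeFilter(min_=0, max_=5)
my_screen = Screen(filters=filters, name='Liquid USD corporates')  # illustrative name
assets = my_screen.calculate()  # DataFrame of assets passing the filters
my_screen.save()                # first save creates the screen and assigns an id
print(my_screen.id)
my_screen.delete()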
diff --git a/gs_quant/models/factor_risk_model_utils.py b/gs_quant/models/factor_risk_model_utils.py
index af8733f6..ebc1b45c 100644
--- a/gs_quant/models/factor_risk_model_utils.py
+++ b/gs_quant/models/factor_risk_model_utils.py
@@ -17,6 +17,9 @@
 import pandas as pd
 import datetime as dt
 import math
+
+from gs_quant.target.risk_models import RiskModelData
+
 from gs_quant.api.gs.risk_models import GsFactorRiskModelApi
@@ -126,51 +129,118 @@ def divide_request(data, n):
         yield data[i:i + n]
 
 
-def batch_and_upload_partial_data(model_id: str, data: dict):
+def batch_and_upload_partial_data(model_id: str, data: dict, max_asset_size):
     """ Takes in total risk model data for one day and batches requests according to
         asset data size, returns a list of messages from resulting post calls"""
     date = data.get('date')
-    target_universe_size = len(data.get('assetData').get('universe'))
-    factor_data = {
-        'date': date,
-        'factorData': data.get('factorData'),
-        'covarianceMatrix': data.get('covarianceMatrix')}
-    print('Uploading factor data')
-    print(GsFactorRiskModelApi.upload_risk_model_data(
-        model_id,
-        factor_data,
-        partial_upload=True)
-    )
-    split_num = math.ceil(target_universe_size / 20000) if math.ceil(target_universe_size / 20000) else 1
-    split_idx = math.ceil(target_universe_size / split_num)
-    for i in range(split_num):
-        end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size + 1
-        asset_data_subset = {'universe': data.get('assetData').get('universe')[i * split_idx:end_idx],
-                             'specificRisk': data.get('assetData').get('specificRisk')[i * split_idx:end_idx],
-                             'factorExposure': data.get('assetData').get('factorExposure')[i * split_idx:end_idx]}
-        optional_asset_inputs = ['totalRisk', 'historicalBeta']
-        for optional_input in optional_asset_inputs:
-            if data.get('assetData').get(optional_input):
-                asset_data_subset[optional_input] = data.get('assetData').get(optional_input)[i * split_idx:end_idx]
-
-        asset_data_request = {'date': date, 'assetData': asset_data_subset}
+    if data.get('factorData'):
+        factor_data = {
+            'date': date,
+            'factorData': data.get('factorData'),
+            'covarianceMatrix': data.get('covarianceMatrix')}
+        print('Uploading factor data')
         print(GsFactorRiskModelApi.upload_risk_model_data(
             model_id,
-            asset_data_request,
-            partial_upload=True,
-            target_universe_size=target_universe_size)
+            factor_data,
+            partial_upload=True)
         )
+    if data.get('assetData'):
+        asset_data_list, target_size = _batch_input_data({'assetData': data.get('assetData')}, max_asset_size)
+        for asset_data_batch in asset_data_list:
+            print(GsFactorRiskModelApi.upload_risk_model_data(
+                model_id,
+                {'assetData': asset_data_batch, 'date': date},
+                partial_upload=True,
+                target_universe_size=target_size)
+            )
+
     if 'issuerSpecificCovariance' in data.keys() or 'factorPortfolios' in data.keys():
-        optional_data = {}
-        for optional_input in ['issuerSpecificCovariance', 'factorPortfolios']:
-            if data.get(optional_input):
-                optional_data[optional_input] = data.get(optional_input)
-        print(f'{list(optional_data.keys())} being uploaded for {date}...')
-        optional_data['date'] = date
-        print(GsFactorRiskModelApi.upload_risk_model_data(
-            model_id,
-            optional_data,
-            partial_upload=True,
-            target_universe_size=target_universe_size)
-        )
+        for optional_input_key in ['issuerSpecificCovariance', 'factorPortfolios']:
+            if data.get(optional_input_key):
+                optional_data = data.get(optional_input_key)
+                optional_data_list, target_size = _batch_input_data({optional_input_key: optional_data},
+                                                                    max_asset_size)
+                print(f'{optional_input_key} being uploaded for {date}...')
+                for optional_data_batch in optional_data_list:
+                    print(GsFactorRiskModelApi.upload_risk_model_data(
+                        model_id,
+                        {optional_input_key: optional_data_batch, 'date': date},
+                        partial_upload=True,
+                        target_universe_size=target_size)
+                    )
+
+
+def risk_model_data_to_json(risk_model_data: RiskModelData) -> dict:
+    risk_model_data = risk_model_data.to_json()
+    risk_model_data['assetData'] = risk_model_data.get('assetData').to_json()
+    if risk_model_data.get('factorPortfolios'):
+        risk_model_data['factorPortfolios'] = risk_model_data.get('factorPortfolios').to_json()
+        risk_model_data['factorPortfolios']['portfolio'] = [portfolio.to_json() for portfolio in
+                                                            risk_model_data.get('factorPortfolios').get(
+                                                                'portfolio')]
+    if risk_model_data.get('issuerSpecificCovariance'):
+        risk_model_data['issuerSpecificCovariance'] = risk_model_data.get('issuerSpecificCovariance').to_json()
+    return risk_model_data
+
+
+def get_universe_size(data_to_split: dict) -> int:
+    data_to_split = list(data_to_split.values())[0]
+    if 'universe' in data_to_split.keys():
+        return len(data_to_split.get('universe'))
+    else:
+        return len(set(data_to_split.get('universeId1') +
+                       data_to_split.get('universeId2')))
+
+
+def _batch_input_data(input_data: dict, max_asset_size: int):
+    data_key = list(input_data.keys())[0]
+    target_universe_size = get_universe_size(input_data)
+    split_num = math.ceil(target_universe_size / max_asset_size) if math.ceil(
+        target_universe_size / max_asset_size) else 1
+    split_idx = math.ceil(target_universe_size / split_num)
+    batched_data_list = []
+    for i in range(split_num):
+        if data_key == 'assetData':
+            data_batched = _batch_asset_input(input_data.get('assetData'), i, split_idx, split_num,
+                                              target_universe_size)
+        elif data_key == 'factorPortfolios':
+            data_batched = _batch_pfp_input(input_data.get('factorPortfolios'), i, split_idx,
+                                            split_num, target_universe_size)
+        else:
+            data_batched = _batch_isc_input(input_data.get('issuerSpecificCovariance'), i, split_idx, split_num,
+                                            target_universe_size)
+        batched_data_list.append(data_batched)
+    return batched_data_list, target_universe_size
+
+
+def _batch_asset_input(input_data: dict, i: int, split_idx: int, split_num: int, target_universe_size: int) -> dict:
+    end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size + 1
+    asset_data_subset = {'universe': input_data.get('universe')[i * split_idx:end_idx],
+                         'specificRisk': input_data.get('specificRisk')[i * split_idx:end_idx],
+                         'factorExposure': input_data.get('factorExposure')[i * split_idx:end_idx]}
+    optional_asset_inputs = ['totalRisk', 'historicalBeta']
+    for optional_input in optional_asset_inputs:
+        if input_data.get(optional_input):
+            asset_data_subset[optional_input] = input_data.get(optional_input)[i * split_idx:end_idx]
+    return asset_data_subset
+
+
+def _batch_pfp_input(input_data: dict, i: int, split_idx: int, split_num: int, target_universe_size: int) -> dict:
+    end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size + 1
+    pfp_data_subset = dict()
+    universe_slice = input_data.get('universe')[i * split_idx:end_idx]
+    pfp_data_subset['universe'] = universe_slice
+    portfolio_slice = list()
+    for portfolio in input_data.get('portfolio'):
+        factor_id = portfolio.get('factorId')
+        weights_slice = portfolio.get('weights')[i * split_idx:end_idx]
+        portfolio_slice.append({"factorId": factor_id, "weights": weights_slice})
+    pfp_data_subset['portfolio'] = portfolio_slice
+    return pfp_data_subset
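The slicing arithmetic shared by the _batch_* helpers above and below, on illustrative numbers: a 45000-asset universe with max_asset_size=20000 is split into 3 batches of ceil(45000/3) = 15000 rows each:

import math

target_universe_size = 45000
max_asset_size = 20000
split_num = math.ceil(target_universe_size / max_asset_size) or 1
split_idx = math.ceil(target_universe_size / split_num)
for i in range(split_num):
    end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size + 1
    print(i * split_idx, end_idx)  # (0, 15000), (15000, 30000), (30000, 45001)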
+
+
+def _batch_isc_input(input_data: dict, i: int, split_idx: int, split_num: int, target_universe_size: int) -> dict:
+    end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size + 1
+    return {'universeId1': input_data.get('universeId1')[i * split_idx:end_idx],
+            'universeId2': input_data.get('universeId2')[i * split_idx:end_idx],
+            'covariance': input_data.get('covariance')[i * split_idx:end_idx]}
diff --git a/gs_quant/models/risk_model.py b/gs_quant/models/risk_model.py
index cb4b3d22..95c1bce0 100644
--- a/gs_quant/models/risk_model.py
+++ b/gs_quant/models/risk_model.py
@@ -22,7 +22,7 @@
 from gs_quant.api.gs.risk_models import GsFactorRiskModelApi, GsRiskModelApi
 from gs_quant.models.factor_risk_model_utils import build_asset_data_map, build_factor_data_map, \
     build_factor_data_dataframe, build_pfp_data_dataframe, get_isc_dataframe, get_covariance_matrix_dataframe, \
-    get_closest_date_index, divide_request, batch_and_upload_partial_data
+    get_closest_date_index, divide_request, batch_and_upload_partial_data, risk_model_data_to_json, get_universe_size
 from gs_quant.target.common import Enum
 from gs_quant.target.risk_models import RiskModel as RiskModelBuilder
 from gs_quant.target.risk_models import RiskModelData, RiskModelCalendar, RiskModelFactor, \
@@ -88,16 +88,24 @@ def delete(self):
         """ Delete existing risk model object from Marquee """
         return GsRiskModelApi.delete_risk_model(self.id)
 
-    def get_dates(self, start_date: dt.date = None, end_date: dt.date = None) -> List:
+    def get_dates(self, start_date: dt.date = None, end_date: dt.date = None) -> List[str]:
         """ Get risk model dates for existing risk model
-        :param start_date: list returned including and after start_date
-        :param end_date: list returned up to and including end_date """
+
+        :param start_date: list returned including and after start_date
+        :param end_date: list returned up to and including end_date
+
+        :return: list of dates where risk model data is present
+        """
         return GsRiskModelApi.get_risk_model_dates(self.id, start_date, end_date)
 
     def get_calendar(self, start_date: dt.date = None, end_date: dt.date = None) -> RiskModelCalendar:
         """ Get risk model calendar for existing risk model between start and end date
-        :param start_date: list returned including and after start_date
-        :param end_date: list returned up to and including end_date """
+
+        :param start_date: list returned including and after start_date
+        :param end_date: list returned up to and including end_date
+
+        :return: RiskModelCalendar for model
+        """
         calendar = GsRiskModelApi.get_risk_model_calendar(self.id)
         if not start_date and not end_date:
             return calendar
@@ -108,15 +116,19 @@ def get_calendar(self, start_date: dt.date = None, end_date: dt.date = None) ->
 
     def upload_calendar(self, calendar: RiskModelCalendar):
         """ Upload risk model calendar to existing risk model
-        :param calendar: RiskModelCalendar containing list of dates where model data is expected"""
+
+        :param calendar: RiskModelCalendar containing list of dates where model data is expected
+        """
         return GsRiskModelApi.upload_risk_model_calendar(self.id, calendar)
 
     def get_missing_dates(self, end_date: dt.date = None):
         """ Get any dates where data is not published according to expected days returned from the risk model calendar
-        :param end_date: date to truncate missing dates at
+
+        :param end_date: date to truncate missing dates at
             If no end_date is provided, end_date defaults to T-1 date according
-            to the risk model calendar """
+            to the risk model calendar
+        """
         posted_results = self.get_dates()
         if not end_date:
             end_date = dt.date.today() - dt.timedelta(days=1)
@@ -146,16 +158,19 @@ def __init__(self,
                  entitlements: Union[Dict, Entitlements] = None,
                  description: str = None):
         """ Create new factor risk model object
-        :param model_id: risk model id (cannot be changed)
-        :param name: risk model name
-        :param coverage: coverage of risk model asset universe
-        :param term: horizon term
-        :param universe_identifier: identifier used in asset universe upload (cannot be changed)
-        :param vendor: risk model vendor
-        :param version: version of model
-        :param entitlements: entitlements associated with risk model
-        :param description: risk model description
-        :return: Factor Risk Model object """
+
+        :param model_id: risk model id (cannot be changed)
+        :param name: risk model name
+        :param coverage: coverage of risk model asset universe
+        :param term: horizon term
+        :param universe_identifier: identifier used in asset universe upload (cannot be changed)
+        :param vendor: risk model vendor
+        :param version: version of model
+        :param entitlements: entitlements associated with risk model
+        :param description: risk model description
+
+        :return: FactorRiskModel object
+        """
         super().__init__(model_id, name, entitlements=entitlements, description=description)
         self.__coverage = coverage if isinstance(coverage, CoverageType) else CoverageType(coverage)
         self.__term = term if isinstance(term, Term) else Term(term)
@@ -213,8 +228,11 @@ def coverage(self, coverage: Union[CoverageType, str]):
     @classmethod
     def get(cls, model_id: str):
         """ Get a factor risk model from Marquee
-        :param model_id: risk model id corresponding to Marquee Factor Risk Model
-        :return: Factor Risk Model object """
+
+        :param model_id: risk model id corresponding to Marquee Factor Risk Model
+
+        :return: Factor Risk Model object
+        """
         model = GsRiskModelApi.get_risk_model(model_id)
         return FactorRiskModel(model_id,
                                model.name,
@@ -226,7 +244,39 @@ def get(cls, model_id: str):
                                entitlements=model.entitlements,
                                description=model.description)
 
-    def upload(self):
+    @classmethod
+    def get_many(cls,
+                 ids: List[str] = None,
+                 terms: List[str] = None,
+                 vendors: List[str] = None,
+                 names: List[str] = None,
+                 coverages: List[str] = None):
+        """ Get many factor risk models from Marquee
+
+        :param ids: list of model identifiers in Marquee
+        :param terms: list of model terms
+        :param vendors: list of model vendors
+        :param names: list of model names
+        :param coverages: list of model coverages
+
+        :return: list of Factor Risk Model objects
+        """
+        models = GsRiskModelApi.get_risk_models(ids=ids,
+                                                terms=terms,
+                                                vendors=vendors,
+                                                names=names,
+                                                coverages=coverages)
+        return [FactorRiskModel(model.id,
+                                model.name,
+                                model.coverage,
+                                model.term,
+                                model.universe_identifier,
+                                model.vendor,
+                                model.version,
+                                entitlements=model.entitlements,
+                                description=model.description) for model in models]
+
+    def save(self):
         """ Upload current Factor Risk Model object to Marquee """
         new_model = RiskModelBuilder(self.coverage,
                                      self.id,
@@ -248,26 +298,36 @@ def update(self):
 
     def get_factor(self, factor_id: str) -> RiskModelFactor:
         """ Get risk model factor from model and factor ids
-        :param factor_id: factor identifier associated with risk model
-        :return: Risk Model Factor object """
+
+        :param factor_id: factor identifier associated with risk model
+
+        :return: Risk Model Factor object
+        """
         return GsFactorRiskModelApi.get_risk_model_factor(self.id, factor_id)
 
     def create_factor(self, factor: RiskModelFactor) -> RiskModelFactor:
         """ Create a new risk model factor
-        :param factor: factor object
-        :return: Risk Model Factor object """
+
+        :param factor: factor object
+        :return: Risk Model Factor object
+        """
         return GsFactorRiskModelApi.create_risk_model_factor(self.id, factor)
 
     def update_factor(self, factor_id: str, factor: RiskModelFactor) -> RiskModelFactor:
         """ Update existing risk model factor
-        :param factor_id: factor identifier associated with risk model to update
-        :param factor: factor object associated with risk model
-        :return: Risk Model Factor object """
+
+        :param factor_id: factor identifier associated with risk model to update
+        :param factor: factor object associated with risk model
+
+        :return: Risk Model Factor object
+        """
         return GsFactorRiskModelApi.update_risk_model_factor(self.id, factor_id, factor)
 
     def delete_factor(self, factor_id: str):
         """ Delete a risk model factor
-        :param factor_id: factor identifier associated with risk model to delete """
+
+        :param factor_id: factor identifier associated with risk model to delete
+        """
         GsFactorRiskModelApi.delete_risk_model_factor(self.id, factor_id)
 
     def get_factor_data(self,
@@ -277,12 +337,15 @@ def get_factor_data(self,
                         include_performance_curve: bool = False,
                         format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get factor data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param identifiers: list of factor ids associated with risk model
-        :param include_performance_curve: request to include the performance curve of the factors
-        :param format: which format to return the results in
-        :return: risk model factor data """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param identifiers: list of factor ids associated with risk model
+        :param include_performance_curve: request to include the performance curve of the factors
+        :param format: which format to return the results in
+
+        :return: risk model factor data
+        """
         factor_data = GsFactorRiskModelApi.get_risk_model_factor_data(
             self.id,
             start_date,
@@ -300,12 +363,14 @@ def get_asset_universe(self,
                            assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                            format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get asset universe data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: risk model universe """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: risk model universe
+        """
         if not assets.universe and not end_date:
             end_date = start_date
         results = GsFactorRiskModelApi.get_risk_model_data(
@@ -329,11 +394,14 @@ def get_historical_beta(self,
                             assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                             format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get historical beta data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: historical beta for assets requested """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: historical beta for assets requested
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -354,11 +422,14 @@ def get_total_risk(self,
                        assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                        format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get total risk data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: total risk for assets requested """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: total risk for assets requested
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -379,11 +450,14 @@ def get_specific_risk(self,
                           assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                           format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get specific risk data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: specific risk for assets requested """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: specific risk for assets requested
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -404,11 +478,14 @@ def get_residual_variance(self,
                               assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                               format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get residual variance data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: residual variance for assets requested """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: residual variance for assets requested
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -429,11 +506,14 @@ def get_universe_factor_exposure(self,
                                      assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                                      format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[List[Dict], pd.DataFrame]:
         """ Get universe factor exposure data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: factor exposure for assets requested """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
+        :param format: which format to return the results in
+
+        :return: factor exposure for assets requested
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -458,10 +538,13 @@ def get_factor_returns_by_name(self,
                                    end_date: dt.date = None,
                                    format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
         """ Get factor return data for existing risk model keyed by name
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param format: which format to return the results in
-        :return: factor returns by name """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param format: which format to return the results in
+
+        :return: factor returns by name
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -479,10 +562,13 @@ def get_factor_returns_by_id(self,
                                  end_date: dt.date = None,
                                  format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
         """ Get factor return data for existing risk model keyed by factor id
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param format: which format to return the results in
-        :return: factor returns by factor id """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param format: which format to return the results in
+
+        :return: factor returns by factor id
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -500,10 +586,13 @@ def get_covariance_matrix(self,
                               end_date: dt.date = None,
                               format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
         """ Get covariance matrix data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param format: which format to return the results in
-        :return: covariance matrix of daily factor returns """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param format: which format to return the results in
+
+        :return: covariance matrix of daily factor returns
+        """
         results = GsFactorRiskModelApi.get_risk_model_data(
             model_id=self.id,
             start_date=start_date,
@@ -521,11 +610,14 @@ def get_issuer_specific_covariance(self,
                                        assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []),
                                        format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]:
         """ Get issuer specific covariance data for existing risk model
-        :param start_date: start date for data request
-        :param end_date: end date for data request
-        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
-        :param format: which format to return the results in
-        :return: issuer specific covariance matrix (covariance of assets with the same issuer) """
+
+        :param start_date: start date for data request
+        :param end_date: end date for data request
+        :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request
which format to return the results in + + :return: issuer specific covariance matrix (covariance of assets with the same issuer) + """ isc = GsFactorRiskModelApi.get_risk_model_data( model_id=self.id, start_date=start_date, @@ -543,11 +635,14 @@ def get_factor_portfolios(self, assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []), format: ReturnFormat = ReturnFormat.DATA_FRAME) -> Union[Dict, pd.DataFrame]: """ Get factor portfolios data for existing risk model - :param start_date: start date for data request - :param end_date: end date for data request - :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request - :param format: which format to return the results in - :return: factor portfolios data """ + + :param start_date: start date for data request + :param end_date: end date for data request + :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request + :param format: which format to return the results in + + :return: factor portfolios data + """ results = GsFactorRiskModelApi.get_risk_model_data( model_id=self.id, start_date=start_date, @@ -566,13 +661,16 @@ def get_data(self, assets: DataAssetsRequest = DataAssetsRequest(UniverseIdentifier.gsid, []), limit_factors: bool = True) -> Dict: """ Get data for multiple measures for existing risk model - :param measures: list of measures for general risk model data request - :param start_date: start date for data request - :param end_date: end date for data request - :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request - :param limit_factors: limit factors included in factorData and covariance matrix to only include factors + + :param measures: list of measures for general risk model data request + :param start_date: start date for data request + :param end_date: end date for data request + :param assets: DataAssetsRequest object with identifier and list of assets to retrieve for request + :param limit_factors: limit factors included in factorData and covariance matrix to only include factors which the input universe has non-zero exposure to - :return: factor portfolios data """ + + :return: risk model data for the measures requested + """ return GsFactorRiskModelApi.get_risk_model_data( model_id=self.id, start_date=start_date, @@ -582,29 +680,34 @@ def get_data(self, limit_factors=limit_factors ) - def upload_data(self, data: Union[RiskModelData, Dict]): + def upload_data(self, data: Union[RiskModelData, Dict], max_asset_batch_size: int = 20000): """ Upload risk model data to existing risk model in Marquee - :param data: complete risk model data for uploading on given date - includes: date, factorData, assetData, covarianceMatrix with optional inputs: - issuerSpecificCovariance and factorPortfolios - If upload universe is over 20000 assets, will batch and upload data in chunks of 20000 assets """ + :param data: complete risk model data for uploading on given date + includes: date, factorData, assetData, covarianceMatrix with optional inputs: + issuerSpecificCovariance and factorPortfolios + :param max_asset_batch_size: size of payload to batch with. Defaults to 20000 assets + + If the upload universe exceeds max_asset_batch_size, the data will be batched and uploaded in chunks of + max_asset_batch_size assets + """ - data = data.to_json() if type(data) == RiskModelData else data - target_universe_size = len(data.get('assetData').get('universe')) - if target_universe_size > 20000: + data = risk_model_data_to_json(data) if type(data) == RiskModelData else data + target_universe_size = get_universe_size(data) + if target_universe_size > max_asset_batch_size: print('Batching uploads due to universe size') - batch_and_upload_partial_data(self.id, data) + batch_and_upload_partial_data(self.id, data, max_asset_batch_size) else: print(GsFactorRiskModelApi.upload_risk_model_data(self.id, data))
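A minimal usage sketch for the new batching parameter (the model id, date, and payload fields below are hypothetical placeholders, not part of this change):

    # Sketch only: assumes an authenticated GsSession and a saved risk model.
    from gs_quant.models.risk_model import FactorRiskModel

    model = FactorRiskModel.get('MY_MODEL_ID')  # hypothetical model id
    data = {
        'date': '2021-08-10',
        'assetData': {...},           # universe, exposures, specific risk, ...
        'factorData': [...],
        'covarianceMatrix': [[...]],
    }
    # Universes larger than max_asset_batch_size are split and uploaded in chunks
    model.upload_data(data, max_asset_batch_size=10000)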
def upload_partial_data(self, data: RiskModelData, target_universe_size: float = None): """ Upload partial risk model data to existing risk model in Marquee - :param data: partial risk model data for uploading on given date - :param target_universe_size: the size of the complete universe on date - The models factorData and covarianceMatrix must be uploaded first on given date if repeats in partial - upload, newer posted data will replace existing data on upload day """ + :param data: partial risk model data for uploading on given date + :param target_universe_size: the size of the complete universe on date + + The model's factorData and covarianceMatrix must be uploaded first on the given date. If data repeats in a + partial upload, the newer posted data will replace the existing data on the upload date + """ print(GsFactorRiskModelApi.upload_risk_model_data( self.id, data, @@ -614,10 +717,12 @@ def upload_partial_data(self, data: RiskModelData, target_universe_size: float = def upload_asset_coverage_data(self, date: dt.date = None): """ Upload to the coverage dataset for given risk model and date - :param date: date to upload coverage data for, default date is last date from risk model calendar - Posting to the coverage dataset within in the last 5 days will enable the risk model to be seen in the - Marquee UI dropdown for users with "execute" capabilities """ + :param date: date to upload coverage data for, default date is last date from risk model calendar + + Posting to the coverage dataset within the last 5 days will enable the risk model to be seen in the + Marquee UI dropdown for users with "execute" capabilities + """ if not date: date = self.get_dates()[-1] update_time = dt.datetime.today().strftime("%Y-%m-%dT%H:%M:%SZ") diff --git a/gs_quant/risk/measures.py b/gs_quant/risk/measures.py index 9d4e8f4c..d42b5981 100644 --- a/gs_quant/risk/measures.py +++ b/gs_quant/risk/measures.py @@ -351,6 +351,11 @@ def __call__(self, currency: str = None, 'Credit Theta', RiskMeasureType.Theta, asset_class=AssetClass.Credit) +CDATMSpread = __risk_measure_with_doc_string( + 'CDATMSpread', + 'Credit ATM Spread', + RiskMeasureType.ATM_Spread, + asset_class=AssetClass.Credit) CRIFIRCurve = __risk_measure_with_doc_string( 'CRIFIRCurve', 'CRIF IR Curve', diff --git a/gs_quant/risk/result_handlers.py b/gs_quant/risk/result_handlers.py index 24950c9b..3b6bc5d9 100644 --- a/gs_quant/risk/result_handlers.py +++ b/gs_quant/risk/result_handlers.py @@ -182,11 +182,10 @@ def risk_vector_handler(result: dict, risk_key: RiskKey, _instrument: Instrument ('mkt_quoting_style', 'quoteStyle'), ('value', 'value') ) - return __dataframe_handler(result['points'], mappings, risk_key, request_id=request_id) -def risk_theta_handler(result: dict, risk_key: RiskKey, _instrument: InstrumentBase, +def risk_float_handler(result: dict,
risk_key: RiskKey, _instrument: InstrumentBase, request_id: Optional[str] = None) -> FloatWithInfo: return FloatWithInfo(risk_key, result['values'][0], request_id=request_id) @@ -233,7 +232,8 @@ def unsupported_handler(_result: dict, risk_key: RiskKey, _instrument: Instrumen 'Risk': risk_handler, 'RiskByClass': risk_by_class_handler, 'RiskVector': risk_vector_handler, - 'RiskTheta': risk_theta_handler, + 'RiskSecondOrderVector': risk_float_handler, + 'RiskTheta': risk_float_handler, 'Market': market_handler, 'Unsupported': unsupported_handler } diff --git a/gs_quant/target/assets_screener.py b/gs_quant/target/assets_screener.py index 4bb07074..b3caf920 100644 --- a/gs_quant/target/assets_screener.py +++ b/gs_quant/target/assets_screener.py @@ -14,11 +14,502 @@ under the License. """ -from typing import Union - -from gs_quant.base import Base, camel_case_translate, get_enum_value -from gs_quant.common import AssetScreenerRequestFilterLimits, AssetScreenerRequestFilterDateLimits, \ - AssetScreenerCreditStandardAndPoorsRatingOptions, AssetClass, AssetType +from gs_quant.target.common import * +import datetime +from typing import Mapping, Tuple, Union +from enum import Enum +from gs_quant.base import Base, EnumBase, InstrumentBase, camel_case_translate, get_enum_value + + +class AssetClass(EnumBase, Enum): + + """Asset classification of security. Assets are classified into broad groups which + exhibit similar characteristics and behave in a consistent way under + different market conditions""" + + Cash = 'Cash' + Commod = 'Commod' + Credit = 'Credit' + Cross_Asset = 'Cross Asset' + Econ = 'Econ' + Equity = 'Equity' + Fund = 'Fund' + FX = 'FX' + Mortgage = 'Mortgage' + Rates = 'Rates' + Loan = 'Loan' + Social = 'Social' + Cryptocurrency = 'Cryptocurrency' + + def __repr__(self): + return self.value + + +class AssetType(EnumBase, Enum): + + """Asset type differentiates the product categorization or contract type""" + + Access = 'Access' + AssetSwapFxdFlt = 'AssetSwapFxdFlt' + AssetSwapFxdFxd = 'AssetSwapFxdFxd' + Any = 'Any' + AveragePriceOption = 'AveragePriceOption' + Basis = 'Basis' + BasisSwap = 'BasisSwap' + Benchmark = 'Benchmark' + Benchmark_Rate = 'Benchmark Rate' + Binary = 'Binary' + Bond = 'Bond' + BondFuture = 'BondFuture' + BondFutureOption = 'BondFutureOption' + BondOption = 'BondOption' + Calendar_Spread = 'Calendar Spread' + Cap = 'Cap' + Cash = 'Cash' + Certificate = 'Certificate' + CD = 'CD' + Cliquet = 'Cliquet' + CMSOption = 'CMSOption' + CMSOptionStrip = 'CMSOptionStrip' + CMSSpreadOption = 'CMSSpreadOption' + CMSSpreadOptionStrip = 'CMSSpreadOptionStrip' + Commodity = 'Commodity' + CommodityReferencePrice = 'CommodityReferencePrice' + CommodVarianceSwap = 'CommodVarianceSwap' + CommodityPowerNode = 'CommodityPowerNode' + CommodityPowerAggregatedNodes = 'CommodityPowerAggregatedNodes' + CommodityEUNaturalGasHub = 'CommodityEUNaturalGasHub' + CommodityNaturalGasHub = 'CommodityNaturalGasHub' + Company = 'Company' + Convertible = 'Convertible' + Credit_Basket = 'Credit Basket' + Cross = 'Cross' + CSL = 'CSL' + Currency = 'Currency' + Custom_Basket = 'Custom Basket' + Cryptocurrency = 'Cryptocurrency' + Default_Swap = 'Default Swap' + DoubleKnockout = 'DoubleKnockout' + DoubleTouch = 'DoubleTouch' + Economic = 'Economic' + Endowment = 'Endowment' + Equity_Basket = 'Equity Basket' + EuropeanKnockout = 'EuropeanKnockout' + ETF = 'ETF' + ETN = 'ETN' + Event = 'Event' + FRA = 'FRA' + FixedLeg = 'FixedLeg' + Fixing = 'Fixing' + FloatLeg = 'FloatLeg' + Floor = 'Floor' + Forward = 
'Forward' + Fund = 'Fund' + Future = 'Future' + FutureContract = 'FutureContract' + FutureMarket = 'FutureMarket' + FutureOption = 'FutureOption' + FutureStrategy = 'FutureStrategy' + FXForward = 'FXForward' + Hedge_Fund = 'Hedge Fund' + Index = 'Index' + IndexOption = 'IndexOption' + InflationSwap = 'InflationSwap' + Inter_Commodity_Spread = 'Inter-Commodity Spread' + InvoiceSpread = 'InvoiceSpread' + Knockout = 'Knockout' + Market_Location = 'Market Location' + MLF = 'MLF' + Multi_Asset_Allocation = 'Multi-Asset Allocation' + MultiCrossBinary = 'MultiCrossBinary' + MultiCrossBinaryLeg = 'MultiCrossBinaryLeg' + Mutual_Fund = 'Mutual Fund' + Note = 'Note' + OneTouch = 'OneTouch' + Option = 'Option' + OptionLeg = 'OptionLeg' + OptionStrategy = 'OptionStrategy' + Peer_Group = 'Peer Group' + Pension_Fund = 'Pension Fund' + Preferred_Stock = 'Preferred Stock' + Physical = 'Physical' + Precious_Metal = 'Precious Metal' + Precious_Metal_Swap = 'Precious Metal Swap' + Precious_Metal_RFQ = 'Precious Metal RFQ' + Reference_Entity = 'Reference Entity' + Research_Basket = 'Research Basket' + Rate = 'Rate' + Risk_Premia = 'Risk Premia' + Roll = 'Roll' + Securities_Lending_Loan = 'Securities Lending Loan' + Share_Class = 'Share Class' + Single_Stock = 'Single Stock' + Swap = 'Swap' + SwapLeg = 'SwapLeg' + SwapStrategy = 'SwapStrategy' + Swaption = 'Swaption' + Synthetic = 'Synthetic' + Systematic_Hedging = 'Systematic Hedging' + VarianceSwap = 'VarianceSwap' + VolatilitySwap = 'VolatilitySwap' + VolVarSwap = 'VolVarSwap' + WeatherIndex = 'WeatherIndex' + XccySwap = 'XccySwap' + XccySwapFixFix = 'XccySwapFixFix' + XccySwapFixFlt = 'XccySwapFixFlt' + XccySwapMTM = 'XccySwapMTM' + + def __repr__(self): + return self.value + + +class Currency(EnumBase, Enum): + + """Currency, ISO 4217 currency code or exchange quote modifier (e.g. 
GBP vs GBp)""" + + _ = '' + ACU = 'ACU' + ADP = 'ADP' + AED = 'AED' + AFA = 'AFA' + ALL = 'ALL' + AMD = 'AMD' + ANG = 'ANG' + AOA = 'AOA' + AOK = 'AOK' + AON = 'AON' + ARA = 'ARA' + ARS = 'ARS' + ARZ = 'ARZ' + ATS = 'ATS' + AUD = 'AUD' + AUZ = 'AUZ' + AZM = 'AZM' + AZN = 'AZN' + B03 = 'B03' + BAD = 'BAD' + BAK = 'BAK' + BAM = 'BAM' + BBD = 'BBD' + BDN = 'BDN' + BDT = 'BDT' + BEF = 'BEF' + BGL = 'BGL' + BGN = 'BGN' + BHD = 'BHD' + BIF = 'BIF' + BMD = 'BMD' + BND = 'BND' + BOB = 'BOB' + BR6 = 'BR6' + BRE = 'BRE' + BRF = 'BRF' + BRL = 'BRL' + BRR = 'BRR' + BSD = 'BSD' + BTC = 'BTC' + BTN = 'BTN' + BTR = 'BTR' + BWP = 'BWP' + BYR = 'BYR' + BZD = 'BZD' + C23 = 'C23' + CAC = 'CAC' + CAD = 'CAD' + CAZ = 'CAZ' + CCI = 'CCI' + CDF = 'CDF' + CFA = 'CFA' + CHF = 'CHF' + CHZ = 'CHZ' + CLF = 'CLF' + CLP = 'CLP' + CLZ = 'CLZ' + CNH = 'CNH' + CNO = 'CNO' + CNY = 'CNY' + CNZ = 'CNZ' + COP = 'COP' + COZ = 'COZ' + CPB = 'CPB' + CPI = 'CPI' + CRC = 'CRC' + CUP = 'CUP' + CVE = 'CVE' + CYP = 'CYP' + CZH = 'CZH' + CZK = 'CZK' + DAX = 'DAX' + DEM = 'DEM' + DIJ = 'DIJ' + DJF = 'DJF' + DKK = 'DKK' + DOP = 'DOP' + DZD = 'DZD' + E51 = 'E51' + E52 = 'E52' + E53 = 'E53' + E54 = 'E54' + ECI = 'ECI' + ECS = 'ECS' + ECU = 'ECU' + EEK = 'EEK' + EF0 = 'EF0' + EGP = 'EGP' + ESP = 'ESP' + ETB = 'ETB' + EUR = 'EUR' + EUZ = 'EUZ' + F06 = 'F06' + FED = 'FED' + FIM = 'FIM' + FJD = 'FJD' + FKP = 'FKP' + FRF = 'FRF' + FT1 = 'FT1' + GBP = 'GBP' + GBZ = 'GBZ' + GEK = 'GEK' + GEL = 'GEL' + GHC = 'GHC' + GHS = 'GHS' + GHY = 'GHY' + GIP = 'GIP' + GLD = 'GLD' + GLR = 'GLR' + GMD = 'GMD' + GNF = 'GNF' + GQE = 'GQE' + GRD = 'GRD' + GTQ = 'GTQ' + GWP = 'GWP' + GYD = 'GYD' + HKB = 'HKB' + HKD = 'HKD' + HNL = 'HNL' + HRK = 'HRK' + HSI = 'HSI' + HTG = 'HTG' + HUF = 'HUF' + IDB = 'IDB' + IDO = 'IDO' + IDR = 'IDR' + IEP = 'IEP' + IGP = 'IGP' + ILS = 'ILS' + INO = 'INO' + INP = 'INP' + INR = 'INR' + IPA = 'IPA' + IPX = 'IPX' + IQD = 'IQD' + IRR = 'IRR' + IRS = 'IRS' + ISI = 'ISI' + ISK = 'ISK' + ISO = 'ISO' + ITL = 'ITL' + J05 = 'J05' + JMD = 'JMD' + JNI = 'JNI' + JOD = 'JOD' + JPY = 'JPY' + JPZ = 'JPZ' + JZ9 = 'JZ9' + KES = 'KES' + KGS = 'KGS' + KHR = 'KHR' + KMF = 'KMF' + KOR = 'KOR' + KPW = 'KPW' + KRW = 'KRW' + KWD = 'KWD' + KYD = 'KYD' + KZT = 'KZT' + LAK = 'LAK' + LBA = 'LBA' + LBP = 'LBP' + LHY = 'LHY' + LKR = 'LKR' + LRD = 'LRD' + LSL = 'LSL' + LSM = 'LSM' + LTL = 'LTL' + LUF = 'LUF' + LVL = 'LVL' + LYD = 'LYD' + MAD = 'MAD' + MDL = 'MDL' + MGF = 'MGF' + MKD = 'MKD' + MMK = 'MMK' + MNT = 'MNT' + MOP = 'MOP' + MRO = 'MRO' + MTP = 'MTP' + MUR = 'MUR' + MVR = 'MVR' + MWK = 'MWK' + MXB = 'MXB' + MXN = 'MXN' + MXP = 'MXP' + MXW = 'MXW' + MXZ = 'MXZ' + MYO = 'MYO' + MYR = 'MYR' + MZM = 'MZM' + MZN = 'MZN' + NAD = 'NAD' + ND3 = 'ND3' + NGF = 'NGF' + NGI = 'NGI' + NGN = 'NGN' + NIC = 'NIC' + NLG = 'NLG' + NOK = 'NOK' + NOZ = 'NOZ' + NPR = 'NPR' + NZD = 'NZD' + NZZ = 'NZZ' + O08 = 'O08' + OMR = 'OMR' + PAB = 'PAB' + PEI = 'PEI' + PEN = 'PEN' + PEZ = 'PEZ' + PGK = 'PGK' + PHP = 'PHP' + PKR = 'PKR' + PLN = 'PLN' + PLZ = 'PLZ' + PSI = 'PSI' + PTE = 'PTE' + PYG = 'PYG' + QAR = 'QAR' + R2K = 'R2K' + ROL = 'ROL' + RON = 'RON' + RSD = 'RSD' + RUB = 'RUB' + RUF = 'RUF' + RUR = 'RUR' + RWF = 'RWF' + SAR = 'SAR' + SBD = 'SBD' + SCR = 'SCR' + SDP = 'SDP' + SDR = 'SDR' + SEK = 'SEK' + SET = 'SET' + SGD = 'SGD' + SGS = 'SGS' + SHP = 'SHP' + SKK = 'SKK' + SLL = 'SLL' + SRG = 'SRG' + SSI = 'SSI' + STD = 'STD' + SUR = 'SUR' + SVC = 'SVC' + SVT = 'SVT' + SYP = 'SYP' + SZL = 'SZL' + T21 = 'T21' + T51 = 'T51' + T52 = 'T52' + T53 = 'T53' + T54 = 'T54' + T55 = 'T55' 
+ T71 = 'T71' + TE0 = 'TE0' + TED = 'TED' + TF9 = 'TF9' + THB = 'THB' + THO = 'THO' + TMM = 'TMM' + TND = 'TND' + TNT = 'TNT' + TOP = 'TOP' + TPE = 'TPE' + TPX = 'TPX' + TRB = 'TRB' + TRL = 'TRL' + TRY = 'TRY' + TRZ = 'TRZ' + TTD = 'TTD' + TWD = 'TWD' + TZS = 'TZS' + UAH = 'UAH' + UCB = 'UCB' + UDI = 'UDI' + UFC = 'UFC' + UFZ = 'UFZ' + UGS = 'UGS' + UGX = 'UGX' + USB = 'USB' + USD = 'USD' + UVR = 'UVR' + UYP = 'UYP' + UYU = 'UYU' + UZS = 'UZS' + VAC = 'VAC' + VEB = 'VEB' + VEF = 'VEF' + VES = 'VES' + VND = 'VND' + VUV = 'VUV' + WST = 'WST' + XAF = 'XAF' + XAG = 'XAG' + XAU = 'XAU' + XPD = 'XPD' + XPT = 'XPT' + XCD = 'XCD' + XDR = 'XDR' + XEU = 'XEU' + XOF = 'XOF' + XPF = 'XPF' + YDD = 'YDD' + YER = 'YER' + YUD = 'YUD' + YUN = 'YUN' + ZAL = 'ZAL' + ZAR = 'ZAR' + ZAZ = 'ZAZ' + ZMK = 'ZMK' + ZMW = 'ZMW' + ZRN = 'ZRN' + ZRZ = 'ZRZ' + ZWD = 'ZWD' + AUd = 'AUd' + BWp = 'BWp' + EUr = 'EUr' + GBp = 'GBp' + ILs = 'ILs' + KWd = 'KWd' + MWk = 'MWk' + SGd = 'SGd' + SZl = 'SZl' + USd = 'USd' + ZAr = 'ZAr' + + def __repr__(self): + return self.value + + +class Region(EnumBase, Enum): + + """Regional classification for the asset""" + + _ = '' + Americas = 'Americas' + Asia = 'Asia' + EM = 'EM' + Europe = 'Europe' + Global = 'Global' + + def __repr__(self): + return self.value class AssetScreenerRequestStringOptions(Base): @@ -302,6 +793,306 @@ def sector(self, value: AssetScreenerRequestStringOptions): self.__sector = value +class AssetScreenerCreditResponseItem(Base): + + """Response object for credit asset screener.""" + + @camel_case_translate + def __init__( + self, + asset_id: str = None, + name: str = None, + entitlements: Entitlements = None, + entitlement_exclusions: EntitlementExclusions = None, + cusip: str = None, + isin: str = None, + bbid: str = None, + currency: Union[Currency, str] = None, + region: Union[Region, str] = None, + seniority: str = None, + rating_standard_and_poors: str = None, + gs_liquidity_score: float = None, + amount_outstanding: float = None, + maturity: float = None, + bval_mid_price: float = None, + yield_to_convention: float = None, + modified_duration: float = None, + spread_to_benchmark: float = None, + g_spread: float = None, + z_spread: float = None, + charge_in_dollars: str = None, + charge_in_bps: str = None, + direction: str = None, + face_value: float = None + ): + super().__init__() + self.asset_id = asset_id + self.name = name + self.entitlements = entitlements + self.entitlement_exclusions = entitlement_exclusions + self.cusip = cusip + self.isin = isin + self.bbid = bbid + self.currency = currency + self.region = region + self.seniority = seniority + self.rating_standard_and_poors = rating_standard_and_poors + self.gs_liquidity_score = gs_liquidity_score + self.amount_outstanding = amount_outstanding + self.maturity = maturity + self.bval_mid_price = bval_mid_price + self.yield_to_convention = yield_to_convention + self.modified_duration = modified_duration + self.spread_to_benchmark = spread_to_benchmark + self.g_spread = g_spread + self.z_spread = z_spread + self.charge_in_dollars = charge_in_dollars + self.charge_in_bps = charge_in_bps + self.direction = direction + self.face_value = face_value + + @property + def asset_id(self) -> str: + """Marquee unique asset identifier.""" + return self.__asset_id + + @asset_id.setter + def asset_id(self, value: str): + self._property_changed('asset_id') + self.__asset_id = value + + @property + def name(self) -> str: + """Display name of the asset""" + return self.__name + + @name.setter + def name(self, value: 
str): + self._property_changed('name') + self.__name = value + + @property + def entitlements(self) -> Entitlements: + """Defines the entitlements of a given resource.""" + return self.__entitlements + + @entitlements.setter + def entitlements(self, value: Entitlements): + self._property_changed('entitlements') + self.__entitlements = value + + @property + def entitlement_exclusions(self) -> EntitlementExclusions: + """Defines the exclusion entitlements of a given resource.""" + return self.__entitlement_exclusions + + @entitlement_exclusions.setter + def entitlement_exclusions(self, value: EntitlementExclusions): + self._property_changed('entitlement_exclusions') + self.__entitlement_exclusions = value + + @property + def cusip(self) -> str: + """Cusip Identifier""" + return self.__cusip + + @cusip.setter + def cusip(self, value: str): + self._property_changed('cusip') + self.__cusip = value + + @property + def isin(self) -> str: + """International Security Number""" + return self.__isin + + @isin.setter + def isin(self, value: str): + self._property_changed('isin') + self.__isin = value + + @property + def bbid(self) -> str: + """Bloomberg Identifier""" + return self.__bbid + + @bbid.setter + def bbid(self, value: str): + self._property_changed('bbid') + self.__bbid = value + + @property + def currency(self) -> Union[Currency, str]: + """Currency, ISO 4217 currency code or exchange quote modifier (e.g. GBP vs GBp)""" + return self.__currency + + @currency.setter + def currency(self, value: Union[Currency, str]): + self._property_changed('currency') + self.__currency = get_enum_value(Currency, value) + + @property + def region(self) -> Union[Region, str]: + """Regional classification for the asset""" + return self.__region + + @region.setter + def region(self, value: Union[Region, str]): + self._property_changed('region') + self.__region = get_enum_value(Region, value) + + @property + def seniority(self) -> str: + """The seniority of the bond""" + return self.__seniority + + @seniority.setter + def seniority(self, value: str): + self._property_changed('seniority') + self.__seniority = value + + @property + def rating_standard_and_poors(self) -> str: + """Bond rating from Standard And Poor's.""" + return self.__rating_standard_and_poors + + @rating_standard_and_poors.setter + def rating_standard_and_poors(self, value: str): + self._property_changed('rating_standard_and_poors') + self.__rating_standard_and_poors = value + + @property + def gs_liquidity_score(self) -> float: + """Goldman Sachs' liquidity score for the bond (1-5.99).""" + return self.__gs_liquidity_score + + @gs_liquidity_score.setter + def gs_liquidity_score(self, value: float): + self._property_changed('gs_liquidity_score') + self.__gs_liquidity_score = value + + @property + def amount_outstanding(self) -> float: + """The aggregate principal amount of the total number of bonds not redeemed or + otherwise discharged.""" + return self.__amount_outstanding + + @amount_outstanding.setter + def amount_outstanding(self, value: float): + self._property_changed('amount_outstanding') + self.__amount_outstanding = value + + @property + def maturity(self) -> float: + """Time to the bond's maturity.""" + return self.__maturity + + @maturity.setter + def maturity(self, value: float): + self._property_changed('maturity') + self.__maturity = value + + @property + def bval_mid_price(self) -> float: + """BVAL mid price.""" + return self.__bval_mid_price + + @bval_mid_price.setter + def bval_mid_price(self, value: float): + 
self._property_changed('bval_mid_price') + self.__bval_mid_price = value + + @property + def yield_to_convention(self) -> float: + """The bond's yield to convention.""" + return self.__yield_to_convention + + @yield_to_convention.setter + def yield_to_convention(self, value: float): + self._property_changed('yield_to_convention') + self.__yield_to_convention = value + + @property + def modified_duration(self) -> float: + """Price sensitivity of a bond when there is a change in the yield to maturity.""" + return self.__modified_duration + + @modified_duration.setter + def modified_duration(self, value: float): + self._property_changed('modified_duration') + self.__modified_duration = value + + @property + def spread_to_benchmark(self) -> float: + """Spread to benchmark.""" + return self.__spread_to_benchmark + + @spread_to_benchmark.setter + def spread_to_benchmark(self, value: float): + self._property_changed('spread_to_benchmark') + self.__spread_to_benchmark = value + + @property + def g_spread(self) -> float: + """Bid G Spread.""" + return self.__g_spread + + @g_spread.setter + def g_spread(self, value: float): + self._property_changed('g_spread') + self.__g_spread = value + + @property + def z_spread(self) -> float: + """Mid Z Spread.""" + return self.__z_spread + + @z_spread.setter + def z_spread(self, value: float): + self._property_changed('z_spread') + self.__z_spread = value + + @property + def charge_in_dollars(self) -> str: + """Goldman's charge to buy or sell the bond from you in $.""" + return self.__charge_in_dollars + + @charge_in_dollars.setter + def charge_in_dollars(self, value: str): + self._property_changed('charge_in_dollars') + self.__charge_in_dollars = value + + @property + def charge_in_bps(self) -> str: + """Goldman's charge to buy or sell the bond from you in bps.""" + return self.__charge_in_bps + + @charge_in_bps.setter + def charge_in_bps(self, value: str): + self._property_changed('charge_in_bps') + self.__charge_in_bps = value + + @property + def direction(self) -> str: + """Whether the position is a buy or sell.""" + return self.__direction + + @direction.setter + def direction(self, value: str): + self._property_changed('direction') + self.__direction = value + + @property + def face_value(self) -> float: + """The face value of the bond.""" + return self.__face_value + + @face_value.setter + def face_value(self, value: float): + self._property_changed('face_value') + self.__face_value = value + + class AssetScreenerRequest(Base): """Request object for asset screener.""" @@ -404,3 +1195,50 @@ def filters(self) -> AssetScreenerCreditRequestFilters: def filters(self, value: AssetScreenerCreditRequestFilters): self._property_changed('filters') self.__filters = value + + +class AssetScreenerCreditResponse(Base): + + @camel_case_translate + def __init__( + self, + total_results: int, + results: Tuple[AssetScreenerCreditResponseItem, ...] 
= None, + scroll_id: str = None, + name: str = None + ): + super().__init__() + self.total_results = total_results + self.results = results + self.scroll_id = scroll_id + self.name = name + + @property + def total_results(self) -> int: + """Total number of results that match the query.""" + return self.__total_results + + @total_results.setter + def total_results(self, value: int): + self._property_changed('total_results') + self.__total_results = value + + @property + def results(self) -> Tuple[AssetScreenerCreditResponseItem, ...]: + """Array of Asset Screener Credit Response Item objects""" + return self.__results + + @results.setter + def results(self, value: Tuple[AssetScreenerCreditResponseItem, ...]): + self._property_changed('results') + self.__results = value + + @property + def scroll_id(self) -> str: + """Scroll identifier to be used to retrieve the next batch of results""" + return self.__scroll_id + + @scroll_id.setter + def scroll_id(self, value: str): + self._property_changed('scroll_id') + self.__scroll_id = value diff --git a/gs_quant/target/common.py b/gs_quant/target/common.py index 4ad7f290..24539b96 100644 --- a/gs_quant/target/common.py +++ b/gs_quant/target/common.py @@ -4142,6 +4142,116 @@ def asset_id(self, value: str): self._property_changed('asset_id') self.__asset_id = value +class AssetScreenerCreditStandardAndPoorsRatingOptions(Base): + + """Options for credit screener rating filter.""" + + @camel_case_translate + def __init__( + self, + min_: str = None, + max_: str = None, + name: str = None + ): + super().__init__() + self.__min = min_ + self.__max = max_ + self.name = name + + @property + def min(self) -> str: + """Minimum rating the user chooses to filter on""" + return self.__min + + @min.setter + def min(self, value: str): + self._property_changed('min') + self.__min = value + + @property + def max(self) -> str: + """Maximum rating the user chooses to filter on""" + return self.__max + + @max.setter + def max(self, value: str): + self._property_changed('max') + self.__max = value + + +class AssetScreenerRequestFilterDateLimits(Base): + + """Min and max date limits for filters on asset screener.""" + + @camel_case_translate + def __init__( + self, + min_: datetime.date = None, + max_: datetime.date = None, + name: str = None + ): + super().__init__() + self.__min = min_ + self.__max = max_ + self.name = name + + @property + def min(self) -> datetime.date: + """lower constraint value""" + return self.__min + + @min.setter + def min(self, value: datetime.date): + self._property_changed('min') + self.__min = value + + @property + def max(self) -> datetime.date: + """upper constraint value""" + return self.__max + + @max.setter + def max(self, value: datetime.date): + self._property_changed('max') + self.__max = value + + +class AssetScreenerRequestFilterLimits(Base): + + """Min and max limits for filters on asset screener.""" + + @camel_case_translate + def __init__( + self, + min_: float = None, + max_: float = None, + name: str = None + ): + super().__init__() + self.__min = min_ + self.__max = max_ + self.name = name + + @property + def min(self) -> float: + """lower constraint value""" + return self.__min + + @min.setter + def min(self, value: float): + self._property_changed('min') + self.__min = value + + @property + def max(self) -> float: + """upper constraint value""" + return self.__max + + @max.setter + def max(self, value: float): + self._property_changed('max') + self.__max = value + class CSLDate(Base):
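For reference, a short sketch of how the new filter-limit types compose (all values illustrative only):

    import datetime
    from gs_quant.target.common import (AssetScreenerRequestFilterLimits,
                                        AssetScreenerRequestFilterDateLimits,
                                        AssetScreenerCreditStandardAndPoorsRatingOptions)

    liquidity = AssetScreenerRequestFilterLimits(min_=1, max_=5.99)
    issue_date = AssetScreenerRequestFilterDateLimits(min_=datetime.date(2015, 1, 1),
                                                      max_=datetime.date(2021, 1, 1))
    rating = AssetScreenerCreditStandardAndPoorsRatingOptions(min_='BBB-', max_='AAA')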
diff --git a/gs_quant/target/reports.py b/gs_quant/target/reports.py index 93c1d9b6..ce9e9017 100644 --- a/gs_quant/target/reports.py +++ b/gs_quant/target/reports.py @@ -21,7 +21,21 @@ from gs_quant.base import Base, EnumBase, InstrumentBase, camel_case_translate, get_enum_value -class PositionSourceType(EnumBase, Enum): +class OrderType(EnumBase, Enum): + """Sort order for query results""" + + Ascending = 'Ascending' + Descending = 'Descending' + + +class FactorRiskTableMode(EnumBase, Enum): + """Display mode for the factor risk table""" + + Exposure = 'Exposure' + ZScore = 'ZScore' + + +class PositionSourceType(EnumBase, Enum): """Source object for position data""" @@ -61,8 +75,8 @@ class ReportMeasures(EnumBase, Enum): exposure = 'exposure' sensitivity = 'sensitivity' mctr = 'mctr' - mctRisk = 'mctRisk' - rmctRisk = 'rmctRisk' + annualizedMCTRisk = 'annualizedMCTRisk' + annualizedRMCTRisk = 'annualizedRMCTRisk' poRisk = 'poRisk' price = 'price' basePrice = 'basePrice' @@ -142,6 +156,7 @@ def __init__( created_by_id: str = None, created_time: datetime.datetime = None, entitlements: Entitlements = None, + earliest_start_date: datetime.date = None, entitlement_exclusions: EntitlementExclusions = None, id_: str = None, last_updated_by_id: str = None, @@ -161,6 +176,7 @@ def __init__( self.created_by_id = created_by_id self.created_time = created_time self.entitlements = entitlements + self.earliest_start_date = earliest_start_date self.entitlement_exclusions = entitlement_exclusions self.__id = id_ self.last_updated_by_id = last_updated_by_id @@ -237,6 +253,16 @@ def entitlements(self, value: Entitlements): self._property_changed('entitlements') self.__entitlements = value + @property + def earliest_start_date(self) -> datetime.date: + """ISO 8601-formatted date""" + return self.__earliest_start_date + + @earliest_start_date.setter + def earliest_start_date(self, value: datetime.date): + self._property_changed('earliest_start_date') + self.__earliest_start_date = value + @property def entitlement_exclusions(self) -> EntitlementExclusions: """Defines the exclusion entitlements of a given resource.""" diff --git a/gs_quant/target/risk_models.py b/gs_quant/target/risk_models.py index db1268e9..93bd9e6c 100644 --- a/gs_quant/target/risk_models.py +++ b/gs_quant/target/risk_models.py @@ -284,11 +284,11 @@ class FactorPortfolio(Base): def __init__( self, factor_id: str, - weight: List[float] + weights: List[float] ): super().__init__() self.factor_id = factor_id - self.weight = weight + self.weights = weights @property def factor_id(self) -> str: @@ -301,12 +301,12 @@ def factor_id(self, value: str): self.__factor_id = value @property - def weight(self) -> List[float]: + def weights(self) -> List[float]: """Weights for each asset in the portfolio""" - return self.__weight + return self.__weights - @weight.setter - def weight(self, value: List[float]): + @weights.setter + def weights(self, value: List[float]): - self._property_changed('weight') - self.__weight = value + self._property_changed('weights') + self.__weights = value diff --git a/gs_quant/target/screens.py b/gs_quant/target/screens.py index 5bd79559..2a580044 100644 --- a/gs_quant/target/screens.py +++ b/gs_quant/target/screens.py @@ -14,39 +14,39 @@ under the License.
""" -from gs_quant.common import (AssetScreenerRequestFilterLimits, AssetScreenerRequestFilterDateLimits, - AssetScreenerCreditStandardAndPoorsRatingOptions, Entitlements) +from gs_quant.target.common import * import datetime -from typing import Tuple -from gs_quant.base import Base, camel_case_translate +from typing import Mapping, Tuple, Union +from gs_quant.base import Base, InstrumentBase, camel_case_translate, get_enum_value class ScreenParameters(Base): + """Filters for credit asset screener in saved screen.""" @camel_case_translate def __init__( - self, - face_value: float = None, - direction: str = None, - currency: Tuple[str, ...] = None, - gs_liquidity_score: AssetScreenerRequestFilterLimits = None, - gs_charge_bps: AssetScreenerRequestFilterLimits = None, - gs_charge_dollars: AssetScreenerRequestFilterLimits = None, - modified_duration: AssetScreenerRequestFilterLimits = None, - issue_date: AssetScreenerRequestFilterDateLimits = None, - yield_to_convention: AssetScreenerRequestFilterLimits = None, - spread_to_benchmark: AssetScreenerRequestFilterLimits = None, - z_spread: AssetScreenerRequestFilterLimits = None, - g_spread: AssetScreenerRequestFilterLimits = None, - bval_mid_price: AssetScreenerRequestFilterLimits = None, - maturity: AssetScreenerRequestFilterLimits = None, - amount_outstanding: AssetScreenerRequestFilterLimits = None, - rating_standard_and_poors: AssetScreenerCreditStandardAndPoorsRatingOptions = None, - seniority: Tuple[str, ...] = None, - sector: Tuple[str, ...] = None, - name: str = None - ): + self, + face_value: float = None, + direction: str = None, + currency: Tuple[str, ...] = None, + gs_liquidity_score: AssetScreenerRequestFilterLimits = None, + gs_charge_bps: AssetScreenerRequestFilterLimits = None, + gs_charge_dollars: AssetScreenerRequestFilterLimits = None, + modified_duration: AssetScreenerRequestFilterLimits = None, + issue_date: AssetScreenerRequestFilterDateLimits = None, + yield_to_convention: AssetScreenerRequestFilterLimits = None, + spread_to_benchmark: AssetScreenerRequestFilterLimits = None, + z_spread: AssetScreenerRequestFilterLimits = None, + g_spread: AssetScreenerRequestFilterLimits = None, + bval_mid_price: AssetScreenerRequestFilterLimits = None, + maturity: AssetScreenerRequestFilterLimits = None, + amount_outstanding: AssetScreenerRequestFilterLimits = None, + rating_standard_and_poors: AssetScreenerCreditStandardAndPoorsRatingOptions = None, + seniority: Tuple[str, ...] = None, + sector: Tuple[str, ...] 
= None, + name: str = None + ): super().__init__() self.face_value = face_value self.direction = direction @@ -76,7 +76,7 @@ def face_value(self) -> float: @face_value.setter def face_value(self, value: float): self._property_changed('face_value') - self.__face_value = value + self.__face_value = value @property def direction(self) -> str: @@ -86,7 +86,7 @@ def direction(self) -> str: @direction.setter def direction(self, value: str): self._property_changed('direction') - self.__direction = value + self.__direction = value @property def currency(self) -> Tuple[str, ...]: @@ -96,7 +96,7 @@ def currency(self) -> Tuple[str, ...]: @currency.setter def currency(self, value: Tuple[str, ...]): self._property_changed('currency') - self.__currency = value + self.__currency = value @property def gs_liquidity_score(self) -> AssetScreenerRequestFilterLimits: @@ -106,7 +106,7 @@ def gs_liquidity_score(self) -> AssetScreenerRequestFilterLimits: @gs_liquidity_score.setter def gs_liquidity_score(self, value: AssetScreenerRequestFilterLimits): self._property_changed('gs_liquidity_score') - self.__gs_liquidity_score = value + self.__gs_liquidity_score = value @property def gs_charge_bps(self) -> AssetScreenerRequestFilterLimits: @@ -116,7 +116,7 @@ def gs_charge_bps(self) -> AssetScreenerRequestFilterLimits: @gs_charge_bps.setter def gs_charge_bps(self, value: AssetScreenerRequestFilterLimits): self._property_changed('gs_charge_bps') - self.__gs_charge_bps = value + self.__gs_charge_bps = value @property def gs_charge_dollars(self) -> AssetScreenerRequestFilterLimits: @@ -126,7 +126,7 @@ def gs_charge_dollars(self) -> AssetScreenerRequestFilterLimits: @gs_charge_dollars.setter def gs_charge_dollars(self, value: AssetScreenerRequestFilterLimits): self._property_changed('gs_charge_dollars') - self.__gs_charge_dollars = value + self.__gs_charge_dollars = value @property def modified_duration(self) -> AssetScreenerRequestFilterLimits: @@ -136,7 +136,7 @@ def modified_duration(self) -> AssetScreenerRequestFilterLimits: @modified_duration.setter def modified_duration(self, value: AssetScreenerRequestFilterLimits): self._property_changed('modified_duration') - self.__modified_duration = value + self.__modified_duration = value @property def issue_date(self) -> AssetScreenerRequestFilterDateLimits: @@ -146,7 +146,7 @@ def issue_date(self) -> AssetScreenerRequestFilterDateLimits: @issue_date.setter def issue_date(self, value: AssetScreenerRequestFilterDateLimits): self._property_changed('issue_date') - self.__issue_date = value + self.__issue_date = value @property def yield_to_convention(self) -> AssetScreenerRequestFilterLimits: @@ -156,7 +156,7 @@ def yield_to_convention(self) -> AssetScreenerRequestFilterLimits: @yield_to_convention.setter def yield_to_convention(self, value: AssetScreenerRequestFilterLimits): self._property_changed('yield_to_convention') - self.__yield_to_convention = value + self.__yield_to_convention = value @property def spread_to_benchmark(self) -> AssetScreenerRequestFilterLimits: @@ -167,7 +167,7 @@ def spread_to_benchmark(self) -> AssetScreenerRequestFilterLimits: @spread_to_benchmark.setter def spread_to_benchmark(self, value: AssetScreenerRequestFilterLimits): self._property_changed('spread_to_benchmark') - self.__spread_to_benchmark = value + self.__spread_to_benchmark = value @property def z_spread(self) -> AssetScreenerRequestFilterLimits: @@ -177,7 +177,7 @@ def z_spread(self) -> AssetScreenerRequestFilterLimits: @z_spread.setter def z_spread(self, value: 
AssetScreenerRequestFilterLimits): self._property_changed('z_spread') - self.__z_spread = value + self.__z_spread = value @property def g_spread(self) -> AssetScreenerRequestFilterLimits: @@ -188,7 +188,7 @@ def g_spread(self) -> AssetScreenerRequestFilterLimits: @g_spread.setter def g_spread(self, value: AssetScreenerRequestFilterLimits): self._property_changed('g_spread') - self.__g_spread = value + self.__g_spread = value @property def bval_mid_price(self) -> AssetScreenerRequestFilterLimits: @@ -198,7 +198,7 @@ def bval_mid_price(self) -> AssetScreenerRequestFilterLimits: @bval_mid_price.setter def bval_mid_price(self, value: AssetScreenerRequestFilterLimits): self._property_changed('bval_mid_price') - self.__bval_mid_price = value + self.__bval_mid_price = value @property def maturity(self) -> AssetScreenerRequestFilterLimits: @@ -208,7 +208,7 @@ def maturity(self) -> AssetScreenerRequestFilterLimits: @maturity.setter def maturity(self, value: AssetScreenerRequestFilterLimits): self._property_changed('maturity') - self.__maturity = value + self.__maturity = value @property def amount_outstanding(self) -> AssetScreenerRequestFilterLimits: @@ -219,7 +219,7 @@ def amount_outstanding(self) -> AssetScreenerRequestFilterLimits: @amount_outstanding.setter def amount_outstanding(self, value: AssetScreenerRequestFilterLimits): self._property_changed('amount_outstanding') - self.__amount_outstanding = value + self.__amount_outstanding = value @property def rating_standard_and_poors(self) -> AssetScreenerCreditStandardAndPoorsRatingOptions: @@ -229,7 +229,7 @@ def rating_standard_and_poors(self) -> AssetScreenerCreditStandardAndPoorsRating @rating_standard_and_poors.setter def rating_standard_and_poors(self, value: AssetScreenerCreditStandardAndPoorsRatingOptions): self._property_changed('rating_standard_and_poors') - self.__rating_standard_and_poors = value + self.__rating_standard_and_poors = value @property def seniority(self) -> Tuple[str, ...]: @@ -239,7 +239,7 @@ def seniority(self) -> Tuple[str, ...]: @seniority.setter def seniority(self, value: Tuple[str, ...]): self._property_changed('seniority') - self.__seniority = value + self.__seniority = value @property def sector(self) -> Tuple[str, ...]: @@ -249,26 +249,27 @@ def sector(self) -> Tuple[str, ...]: @sector.setter def sector(self, value: Tuple[str, ...]): self._property_changed('sector') - self.__sector = value + self.__sector = value class Screen(Base): + """Object representation of a Screen""" @camel_case_translate def __init__( - self, - name: str, - parameters: ScreenParameters, - id_: str = None, - active: bool = None, - owner_id: str = None, - created_by_id: str = None, - created_time: datetime.datetime = None, - last_updated_by_id: str = None, - last_updated_time: datetime.datetime = None, - entitlements: Entitlements = None - ): + self, + name: str, + parameters: ScreenParameters, + id_: str = None, + active: bool = None, + owner_id: str = None, + created_by_id: str = None, + created_time: datetime.datetime = None, + last_updated_by_id: str = None, + last_updated_time: datetime.datetime = None, + entitlements: Entitlements = None + ): super().__init__() self.__id = id_ self.active = active @@ -289,7 +290,7 @@ def id(self) -> str: @id.setter def id(self, value: str): self._property_changed('id') - self.__id = value + self.__id = value @property def active(self) -> bool: @@ -298,7 +299,7 @@ def active(self) -> bool: @active.setter def active(self, value: bool): self._property_changed('active') - self.__active = value + 
self.__active = value @property def owner_id(self) -> str: @@ -308,7 +309,7 @@ def owner_id(self) -> str: @owner_id.setter def owner_id(self, value: str): self._property_changed('owner_id') - self.__owner_id = value + self.__owner_id = value @property def created_by_id(self) -> str: @@ -318,7 +319,7 @@ def created_by_id(self) -> str: @created_by_id.setter def created_by_id(self, value: str): self._property_changed('created_by_id') - self.__created_by_id = value + self.__created_by_id = value @property def created_time(self) -> datetime.datetime: @@ -328,7 +329,7 @@ def created_time(self) -> datetime.datetime: @created_time.setter def created_time(self, value: datetime.datetime): self._property_changed('created_time') - self.__created_time = value + self.__created_time = value @property def last_updated_by_id(self) -> str: @@ -338,7 +339,7 @@ def last_updated_by_id(self) -> str: @last_updated_by_id.setter def last_updated_by_id(self, value: str): self._property_changed('last_updated_by_id') - self.__last_updated_by_id = value + self.__last_updated_by_id = value @property def last_updated_time(self) -> datetime.datetime: @@ -348,7 +349,7 @@ def last_updated_time(self) -> datetime.datetime: @last_updated_time.setter def last_updated_time(self, value: datetime.datetime): self._property_changed('last_updated_time') - self.__last_updated_time = value + self.__last_updated_time = value @property def entitlements(self) -> Entitlements: @@ -358,7 +359,7 @@ def entitlements(self) -> Entitlements: @entitlements.setter def entitlements(self, value: Entitlements): self._property_changed('entitlements') - self.__entitlements = value + self.__entitlements = value @property def name(self) -> str: @@ -368,7 +369,7 @@ def name(self) -> str: @name.setter def name(self, value: str): self._property_changed('name') - self.__name = value + self.__name = value @property def parameters(self) -> ScreenParameters: @@ -378,4 +379,4 @@ def parameters(self) -> ScreenParameters: @parameters.setter def parameters(self, value: ScreenParameters): self._property_changed('parameters') - self.__parameters = value + self.__parameters = value diff --git a/gs_quant/test/api/test_json.py b/gs_quant/test/api/test_json.py new file mode 100644 index 00000000..d66b743e --- /dev/null +++ b/gs_quant/test/api/test_json.py @@ -0,0 +1,39 @@ +""" +Copyright 2021 Goldman Sachs. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+""" + +import datetime as dt +import json + +import pytz +from gs_quant.json_encoder import JSONEncoder + + +def test_datetime_serialisation(): + dates = [ + dt.datetime(2021, 8, 10, 10, 39, 19), + dt.datetime(2021, 8, 10, 10, 39, 19, 59876), + dt.datetime(2021, 8, 10, 10, 39, 19, tzinfo=pytz.timezone('EST')), + dt.datetime(2021, 8, 10, 10, 39, 19, tzinfo=pytz.timezone('UTC')), + ] + expected = [ + '"2021-08-10T10:39:19.000Z"', + '"2021-08-10T10:39:19.059Z"', + '"2021-08-10T10:39:19.000-05:00"', + '"2021-08-10T10:39:19.000+00:00"', + ] + for d, e in zip(dates, expected): + encoded = json.dumps(d, cls=JSONEncoder) + assert encoded == e diff --git a/gs_quant/test/api/test_reports.py b/gs_quant/test/api/test_reports.py index 4dcd0b20..820987ef 100644 --- a/gs_quant/test/api/test_reports.py +++ b/gs_quant/test/api/test_reports.py @@ -325,5 +325,5 @@ def test_get_risk_factor_data_results(mocker): # run test response = GsReportApi.get_risk_factor_data_results('reportId') - GsSession.current._get.assert_called_with('/risk/factors/reports/reportId/results') + GsSession.current._get.assert_called_with('/risk/factors/reports/reportId/results?') assert response == mock_response diff --git a/gs_quant/test/markets/test_portfolio_manager.py b/gs_quant/test/markets/test_portfolio_manager.py index 5baaf1e1..d7c8874c 100644 --- a/gs_quant/test/markets/test_portfolio_manager.py +++ b/gs_quant/test/markets/test_portfolio_manager.py @@ -68,9 +68,9 @@ def test_get_schedule_dates(mocker): # run test pm = PortfolioManager('MP') dates = pm.get_schedule_dates(backcast=True) - assert dates == [dt.date(2019, 1, 1), dt.date(2020, 1, 2)] + assert dates[0] == dt.date(2019, 1, 1) dates = pm.get_schedule_dates(backcast=False) - assert dates == [dt.date(2020, 1, 2), dt.date(2020, 3, 1)] + assert dates[0] == dt.date(2020, 1, 2) def test_set_entitlements(mocker): diff --git a/gs_quant/test/markets/test_report.py b/gs_quant/test/markets/test_report.py index 6d0ab61c..fd6ac700 100644 --- a/gs_quant/test/markets/test_report.py +++ b/gs_quant/test/markets/test_report.py @@ -16,7 +16,8 @@ import pytest -from gs_quant.markets.report import FactorRiskReport, PerformanceReport +from gs_quant.api.gs.data import GsDataApi +from gs_quant.markets.report import FactorRiskReport, PerformanceReport, ThematicReport from gs_quant.session import * from gs_quant.target.reports import ReportStatus, PositionSourceType, ReportType, ReportParameters, Report @@ -36,6 +37,13 @@ parameters=None, status=ReportStatus.done ) +fake_pta = ThematicReport(report_id='PTAID', + position_source_type=PositionSourceType.Portfolio, + position_source_id='PORTFOLIOID', + report_type=ReportType.Portfolio_Thematic_Analytics, + parameters=None, + status=ReportStatus.done + ) factor_risk_results = [ { @@ -67,6 +75,45 @@ } ] +thematic_results = [ + { + "date": "2021-07-12", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.448370345015856E8, + "thematicExposure": 1.1057087573594835E8, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-13", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.375772519907556E8, + "thematicExposure": 1.0511196135243121E8, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-14", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.321189950666118E8, + "thematicExposure": 1.0089556961211234E8, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-15", + 
"reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.274071805135091E8, + "thematicExposure": 9.706991264825605E7, + "updateTime": "2021-07-20T23:43:38Z" + } +] + def test_get_performance_report(mocker): # mock GsSession @@ -209,5 +256,21 @@ def test_get_daily_risk(mocker): assert len(response) == 3 +def test_get_pta_measures(mocker): + # mock GsSession + mocker.patch.object( + GsSession.__class__, + 'default_value', + return_value=GsSession.get( + Environment.QA, + 'client_id', + 'secret')) + mocker.patch.object(GsDataApi, 'query_data', return_value=thematic_results) + + # run test + response = fake_pta._get_pta_measures(["grossExposure", "thematicExposure"]) + assert len(response) == 4 + + if __name__ == '__main__': pytest.main(args=[__file__]) diff --git a/gs_quant/test/models/test_risk_model.py b/gs_quant/test/models/test_risk_model.py index 5000de4a..44c1ce18 100644 --- a/gs_quant/test/models/test_risk_model.py +++ b/gs_quant/test/models/test_risk_model.py @@ -72,7 +72,7 @@ def test_create_risk_model(mocker): 0.1, entitlements={}, description='Test') - new_model.upload() + new_model.save() assert new_model.id == mock_risk_model_obj.id assert new_model.name == mock_risk_model_obj.name assert new_model.description == mock_risk_model_obj.description diff --git a/gs_quant/test/timeseries/test_backtesting.py b/gs_quant/test/timeseries/test_backtesting.py index 7fc18159..40c0fa87 100644 --- a/gs_quant/test/timeseries/test_backtesting.py +++ b/gs_quant/test/timeseries/test_backtesting.py @@ -14,12 +14,18 @@ under the License. """ +import datetime + +import pandas as pd import pytest from pandas.testing import assert_series_equal -from gs_quant.timeseries import * from testfixtures import Replacer from testfixtures.mock import Mock +from gs_quant.timeseries import EdrDataReference +from gs_quant.timeseries.backtesting import Basket, basket_series, MqValueError, MqTypeError, RebalFreq, date, \ + DataContext, np + def test_basket_series(): dates = [ diff --git a/gs_quant/test/timeseries/test_measures.py b/gs_quant/test/timeseries/test_measures.py index 01251056..ab98256f 100644 --- a/gs_quant/test/timeseries/test_measures.py +++ b/gs_quant/test/timeseries/test_measures.py @@ -1628,6 +1628,29 @@ def test_cds_implied_vol(): replace.restore() +def test_implied_vol_credit(): + replace = Replacer() + mock_cds = Index('MA890', AssetClass.Equity, 'CDS') + replace('gs_quant.timeseries.measures.GsDataApi.get_market_data', mock_eq) + actual = tm.implied_volatility_credit(mock_cds, '1m', tm.CdsVolReference.DELTA_CALL, 10) + assert_series_equal(pd.Series([5, 1, 2], index=_index * 3, name='impliedVolatilityByDeltaStrike'), + pd.Series(actual)) + assert actual.dataset_ids == _test_datasets + actual = tm.implied_volatility_credit(mock_cds, '1m', tm.CdsVolReference.DELTA_PUT, 10) + assert_series_equal(pd.Series([5, 1, 2], index=_index * 3, name='impliedVolatilityByDeltaStrike'), + pd.Series(actual)) + assert actual.dataset_ids == _test_datasets + actual = tm.implied_volatility_credit(mock_cds, '1m', tm.CdsVolReference.FORWARD, 100) + assert_series_equal(pd.Series([5, 1, 2], index=_index * 3, name='impliedVolatilityByDeltaStrike'), + pd.Series(actual)) + assert actual.dataset_ids == _test_datasets + with pytest.raises(NotImplementedError): + tm.implied_volatility_credit(..., '1m', tm.CdsVolReference.DELTA_PUT, 75, real_time=True) + with pytest.raises(NotImplementedError): + tm.implied_volatility_credit(..., '1m', "", 75) + replace.restore() + + def 
test_avg_impl_vol(mocker): replace = Replacer() mock_spx = Index('MA890', AssetClass.Equity, 'SPX') diff --git a/gs_quant/test/timeseries/test_measures_reports.py b/gs_quant/test/timeseries/test_measures_reports.py index bd2c5500..b6e8be05 100644 --- a/gs_quant/test/timeseries/test_measures_reports.py +++ b/gs_quant/test/timeseries/test_measures_reports.py @@ -14,21 +14,24 @@ under the License. """ import datetime + import pandas as pd import pytest from testfixtures import Replacer from testfixtures.mock import Mock import gs_quant.timeseries.measures_reports as mr +from gs_quant.api.gs.assets import GsAsset from gs_quant.api.gs.data import MarketDataResponseFrame from gs_quant.data.core import DataContext from gs_quant.errors import MqValueError, MqError +from gs_quant.markets.baskets import Basket +from gs_quant.markets.report import PerformanceReport, ThematicReport from gs_quant.models.risk_model import FactorRiskModel as Factor_Risk_Model -from gs_quant.target.common import ReportParameters +from gs_quant.target.common import ReportParameters, AssetType +from gs_quant.target.portfolios import RiskAumSource, Portfolio from gs_quant.target.reports import Report, PositionSourceType, ReportType from gs_quant.target.risk_models import RiskModel, CoverageType, Term, UniverseIdentifier -from gs_quant.markets.report import PerformanceReport -from gs_quant.target.portfolios import RiskAumSource, Portfolio risk_model = RiskModel(coverage=CoverageType.Country, id_='model_id', name='Fake Risk Model', term=Term.Long, universe_identifier=UniverseIdentifier.gsid, vendor='GS', @@ -149,6 +152,49 @@ aum = [{'date': '2020-01-02', 'aum': 2}, {'date': '2020-01-03', 'aum': 2.2}, {'date': '2020-01-04', 'aum': 2.4}] +thematic_data = [ + { + "date": "2021-07-12", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.448370345015856E8, + "thematicExposure": 2, + "thematicBeta": 1, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-13", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.375772519907556E8, + "thematicExposure": 2, + "thematicBeta": 1, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-14", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.321189950666118E8, + "thematicExposure": 2, + "thematicBeta": 1, + "updateTime": "2021-07-20T23:43:38Z" + }, + { + "date": "2021-07-15", + "reportId": "PTAID", + "basketId": "MA01GPR89HZF1FZ5", + "region": "Asia", + "grossExposure": 3.274071805135091E8, + "thematicExposure": 2, + "thematicBeta": 1, + "updateTime": "2021-07-20T23:43:38Z" + } +] + def mock_risk_model(): risk_model = RiskModel(coverage=CoverageType.Country, id_='model_id', name='Fake Risk Model', @@ -491,5 +537,53 @@ def test_normalized_performance_no_custom_aum(): replace.restore() +def test_thematic_exposure(): + replace = Replacer() + + # mock getting PTA report + mock = replace('gs_quant.markets.report.ThematicReport.get', Mock()) + mock.return_value = ThematicReport(id='report_id') + + # mock getting thematic exposure + mock = replace('gs_quant.markets.report.ThematicReport.get_thematic_exposure', Mock()) + mock.return_value = pd.DataFrame(thematic_data) + + # mock getting thematic basket + mock = replace('gs_quant.markets.baskets.Basket.get', Mock()) + mock.return_value = Basket(GsAsset(id_='basket_id', asset_class='Equity', + type_=AssetType.Custom_Basket, + name='Basket')) + + with DataContext(datetime.date(2020, 7, 12), 
datetime.date(2020, 7, 15)): + actual = mr.thematic_exposure('report_id', 'basket_ticker') + assert all(actual.values == [2, 2, 2, 2]) + + replace.restore() + + +def test_thematic_beta(): + replace = Replacer() + + # mock getting PTA report + mock = replace('gs_quant.markets.report.ThematicReport.get', Mock()) + mock.return_value = ThematicReport(id='report_id') + + # mock getting thematic exposure + mock = replace('gs_quant.markets.report.ThematicReport.get_thematic_betas', Mock()) + mock.return_value = pd.DataFrame(thematic_data) + + # mock getting thematic basket + mock = replace('gs_quant.markets.baskets.Basket.get', Mock()) + mock.return_value = Basket(GsAsset(id_='basket_id', asset_class='Equity', + type_=AssetType.Custom_Basket, + name='Basket')) + + with DataContext(datetime.date(2020, 7, 12), datetime.date(2020, 7, 15)): + actual = mr.thematic_beta('report_id', 'basket_ticker') + assert all(actual.values == [1, 1, 1, 1]) + + replace.restore() + + if __name__ == '__main__': pytest.main(args=[__file__])
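Outside of the mocked tests above, the new plot measures added below in measures_reports.py are called with a real report id and basket ticker inside a DataContext; a hedged sketch (both ids are placeholders):

    import datetime
    import gs_quant.timeseries.measures_reports as mr
    from gs_quant.data.core import DataContext

    with DataContext(datetime.date(2021, 7, 12), datetime.date(2021, 7, 15)):
        exposure = mr.thematic_exposure('PTA_REPORT_ID', 'BASKET_TICKER')
        beta = mr.thematic_beta('PTA_REPORT_ID', 'BASKET_TICKER')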
diff --git a/gs_quant/timeseries/measures_reports.py b/gs_quant/timeseries/measures_reports.py
index 1ca2a5a7..c4a53ba5 100644
--- a/gs_quant/timeseries/measures_reports.py
+++ b/gs_quant/timeseries/measures_reports.py
@@ -19,16 +19,18 @@
 from pydash import decapitalize
 
 from gs_quant.api.gs.data import QueryType
+from gs_quant.api.gs.portfolios import GsPortfolioApi
 from gs_quant.data.core import DataContext
 from gs_quant.entities.entity import EntityType
 from gs_quant.errors import MqValueError, MqError
+from gs_quant.markets.baskets import Basket
 from gs_quant.markets.factor import Factor
 from gs_quant.markets.report import FactorRiskReport, PerformanceReport
+from gs_quant.markets.report import ThematicReport
 from gs_quant.models.risk_model import ReturnFormat
 from gs_quant.target.portfolios import RiskAumSource
 from gs_quant.timeseries import plot_measure_entity
 from gs_quant.timeseries.measures import _extract_series_from_df
-from gs_quant.api.gs.portfolios import GsPortfolioApi
 
 
 @plot_measure_entity(EntityType.REPORT, [QueryType.FACTOR_EXPOSURE])
@@ -178,6 +180,54 @@ def normalized_performance(report_id: str, aum_source: str = None, *, source: st
     return pd.Series(data['normalizedPerformance'], name="normalizedPerformance").dropna()
 
 
+@plot_measure_entity(EntityType.REPORT, [QueryType.THEMATIC_EXPOSURE])
+def thematic_exposure(report_id: str, basket_ticker: str, *, source: str = None,
+                      real_time: bool = False, request_id: Optional[str] = None) -> pd.Series:
+    """
+    Thematic exposure of a portfolio to a requested GS thematic flagship basket
+
+    :param report_id: portfolio thematic analytics report id
+    :param basket_ticker: ticker for thematic basket
+    :param source: name of function caller
+    :param real_time: whether to retrieve intraday data instead of EOD
+    :param request_id: server request id
+    :return: Timeseries of daily thematic exposure of portfolio to requested flagship basket
+    """
+    thematic_report = ThematicReport.get(report_id)
+    thematic_basket = Basket.get(basket_ticker)
+    df = thematic_report.get_thematic_exposure(start_date=DataContext.current.start_time,
+                                               end_date=DataContext.current.end_time,
+                                               basket_ids=[thematic_basket.get_marquee_id()])
+    if not df.empty:
+        df.set_index('date', inplace=True)
+        df.index = pd.to_datetime(df.index)
+    return _extract_series_from_df(df, QueryType.THEMATIC_EXPOSURE)
+
+
+@plot_measure_entity(EntityType.REPORT, [QueryType.THEMATIC_EXPOSURE])
+def thematic_beta(report_id: str, basket_ticker: str, *, source: str = None,
+                  real_time: bool = False, request_id: Optional[str] = None) -> pd.Series:
+    """
+    Thematic beta values of a portfolio to a requested GS thematic flagship basket
+
+    :param report_id: portfolio thematic analytics report id
+    :param basket_ticker: ticker for thematic basket
+    :param source: name of function caller
+    :param real_time: whether to retrieve intraday data instead of EOD
+    :param request_id: server request id
+    :return: Timeseries of daily thematic beta of portfolio to requested flagship basket
+    """
+    thematic_report = ThematicReport.get(report_id)
+    thematic_basket = Basket.get(basket_ticker)
+    df = thematic_report.get_thematic_betas(start_date=DataContext.current.start_time,
+                                            end_date=DataContext.current.end_time,
+                                            basket_ids=[thematic_basket.get_marquee_id()])
+    if not df.empty:
+        df.set_index('date', inplace=True)
+        df.index = pd.to_datetime(df.index)
+    return _extract_series_from_df(df, QueryType.THEMATIC_BETA)
+
+
 def _get_factor_data(report_id: str, factor_name: str, query_type: QueryType) -> pd.Series:
     # Check params
     report = FactorRiskReport.get(report_id)
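Likewise, a hypothetical sketch of calling the two new report measures directly; the report id and basket ticker are placeholders for a real portfolio thematic analytics (PTA) report and GS flagship basket, with an authenticated session assumed as in the previous sketch:

    import datetime

    import gs_quant.timeseries.measures_reports as mr
    from gs_quant.data.core import DataContext

    # placeholder identifiers; substitute a real PTA report id and basket ticker
    with DataContext(datetime.date(2021, 7, 12), datetime.date(2021, 7, 15)):
        exposure = mr.thematic_exposure('<pta_report_id>', '<basket_ticker>')
        beta = mr.thematic_beta('<pta_report_id>', '<basket_ticker>')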
diff --git a/setup.py b/setup.py
index 98501554..5ae710cd 100644
--- a/setup.py
+++ b/setup.py
@@ -79,7 +79,7 @@
         "tqdm",
         "certifi",
         "deprecation",
-        "plotly>=5.1.0"
+        "plotly==5.1.0"
     ],
     extras_require={
         "internal": ["gs_quant_internal>=1.1.30", "requests_kerberos"],
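The move from "plotly>=5.1.0" to "plotly==5.1.0" is an exact pin: any environment installing gs_quant will be resolved to plotly 5.1.0 specifically, presumably to guard against breaking changes in newer plotly releases; the trade-off is that consumers who require a different plotly version will now hit a dependency-resolver conflict.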