Chore: Make release 1.2.2
martinroberson authored and Pang, Stephen S C. [GBM Public] committed Nov 14, 2024
1 parent bac5543 commit 7d79cf4
Showing 4 changed files with 61 additions and 15 deletions.
56 changes: 43 additions & 13 deletions gs_quant/api/gs/secmaster.py
@@ -16,10 +16,11 @@
import datetime as dt
import json
import math
from _typeshed import SupportsLessThan
from enum import Enum
from functools import partial
from itertools import groupby
from typing import Union, Iterable, Dict
from typing import Union, Iterable, Dict, Any

import tqdm

@@ -288,7 +289,7 @@ def __stringify_boolean(cls, bool_value):
return str(bool_value).lower()

@classmethod
def __fetch_all(cls, fetch_fn, offset_key, total_batches=None):
def __fetch_all(cls, fetch_fn, offset_key, total_batches=None, extract_results=True):
accumulator = []
offset = offset_key
progress_info = tqdm.tqdm(desc="Processing", unit=" batch") if total_batches is None else tqdm.tqdm(
@@ -297,7 +298,10 @@ def __fetch_all(cls, fetch_fn, offset_key, total_batches=None):
progress_info.update(1)
data = fetch_fn(offset_key=offset)
if data is not None:
accumulator.extend(data['results'])
if extract_results is True:
accumulator.extend(data['results'])
else:
accumulator.append(data)
if 'offsetKey' not in data:
progress_info.close()
break
@@ -413,28 +417,54 @@ def prepare_params(cls, params, is_primary, offset_key, type_, effective_date=No
params["effectiveDate"] = effective_date

@classmethod
def get_deltas(cls, start_time: dt.datetime = None, end_time: dt.datetime = None, raw: bool = None) -> \
def _get_deltas(cls, start_time: dt.datetime = None, end_time: dt.datetime = None, raw: bool = None,
scope: list = None, limit: int = None, offset_key: str = None) -> \
Iterable[dict]:
"""
Get all identifier changes between two time stamps
@param start_time: start time
@param end_time: end time
@param raw: flag, if true (default) aggregates data to more readable form, if false shows unprocessed results.
@return: list of dict
"""
params = {}

params = {}
if raw is not None:
params["raw"] = GsSecurityMasterApi.__stringify_boolean(raw)
if start_time is not None:
params["startTime"] = start_time

if end_time is not None:
params["endTime"] = end_time
if scope is not None:
params["scope"] = scope
if limit is not None:
params["limit"] = limit
if offset_key is not None:
params["offsetKey"] = offset_key

payload = json.loads(json.dumps(params, cls=JSONEncoder))
r = GsSession.current._get("/markets/securities/identifiers/updates-feed", payload=payload)
return r

@classmethod
def get_deltas(cls, start_time: dt.datetime = None, end_time: dt.datetime = None, raw: bool = None,
scope: list = None, limit: int = None, offset_key: str = None, scroll_all_pages: bool = True) -> \
Union[dict[str, Union[Union[list[Any], None, SupportsLessThan], Any]], Iterable[dict]]:
"""
Get all identifier changes between two time stamps
@param scroll_all_pages: if true (default), fetch and aggregate all pages; if false, return a single page
@param start_time: start time
@param end_time: end time
@param limit: page size of returned matches
@param scope: narrow down the search to a specific set of events
@param offset_key: offset key to fetch next page
@param raw: flag, if true (default) aggregates data to more readable form, if false shows unprocessed results.
@return: list of dict
"""
if scroll_all_pages:
fn = partial(cls._get_deltas, start_time, end_time, raw, scope, limit)
results = cls.__fetch_all(fn, offset_key, extract_results=False)
latest_update_time = max(result['lastUpdateTime'] for result in results)
res = [item for result in results for item in result["results"]]
request_id = results[0]["requestId"] if results else None
return {"results": res, "lastUpdateTime": latest_update_time, "requestId": request_id}
else:
results = cls._get_deltas(start_time, end_time, raw, scope, limit, offset_key)
return results

@classmethod
def get_exchanges(cls, effective_date: dt.date = None,
**query_params: Dict[str, Union[str, Iterable[str]]]):
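A minimal usage sketch for the paginated get_deltas above (not part of the diff; it assumes an authenticated GsSession, and the session setup and date values are illustrative assumptions):

import datetime as dt

from gs_quant.api.gs.secmaster import GsSecurityMasterApi
from gs_quant.session import Environment, GsSession

# Assumption: credentials are available for GsSession.use to authenticate.
GsSession.use(Environment.PROD)

start = dt.datetime(2024, 11, 1)
end = dt.datetime(2024, 11, 14)

# Default scroll_all_pages=True walks every page and returns one aggregated dict
# with 'results', 'lastUpdateTime' and 'requestId', as assembled in get_deltas above.
deltas = GsSecurityMasterApi.get_deltas(start_time=start, end_time=end)
print(len(deltas['results']), deltas['lastUpdateTime'])

# scroll_all_pages=False returns a single page; pass its 'offsetKey' back in to
# fetch the next page manually.
page = GsSecurityMasterApi.get_deltas(start_time=start, end_time=end, limit=500,
                                      scroll_all_pages=False)
next_key = page.get('offsetKey')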
4 changes: 3 additions & 1 deletion gs_quant/risk/result_handlers.py
@@ -377,6 +377,7 @@ def mmapi_pca_table_handler(result: dict, risk_key: RiskKey, _instrument: Instru
r['coordinate'].update({'level': r['level']})
r['coordinate'].update({'sensitivity': r['sensitivity']})
r['coordinate'].update({'irDelta': r['irDelta']})
r['coordinate'].update({'endDate': r['endDate']})
coordinates.append(r['coordinate'])

mappings = (('mkt_type', 'type'),
@@ -391,7 +392,8 @@ def mmapi_pca_table_handler(result: dict, risk_key: RiskKey, _instrument: Instru
('layer4', 'layer4'),
('level', 'level'),
('sensitivity', 'sensitivity'),
('irDelta', 'irDelta'))
('irDelta', 'irDelta'),
('endDate', 'endDate'))

return __dataframe_handler(coordinates, mappings, risk_key, request_id=request_id)

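The new ('endDate', 'endDate') pair extends the coordinate-to-column mapping passed to the handler. A standalone pandas sketch of that mapping step (an assumption about the private __dataframe_handler's behaviour, shown only to illustrate where the new endDate column lands; the sample coordinate values are made up):

import pandas as pd

mappings = (('mkt_type', 'type'), ('level', 'level'),
            ('irDelta', 'irDelta'), ('endDate', 'endDate'))  # subset of the tuple above

coordinates = [{'type': 'IR', 'level': 'PC1', 'irDelta': 1250.0, 'endDate': '2034-11-14'}]

# Assumed behaviour: each (column, field) pair pulls one field per coordinate row.
df = pd.DataFrame([{column: row.get(field) for column, field in mappings}
                   for row in coordinates])
print(list(df.columns))  # ['mkt_type', 'level', 'irDelta', 'endDate']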
2 changes: 1 addition & 1 deletion gs_quant/session.py
@@ -724,8 +724,8 @@ def __init__(self, environment_or_domain: str, domain: str = Domain.APP, api_ver
application: str = DEFAULT_APPLICATION, http_adapter: requests.adapters.HTTPAdapter = None,
application_version: str = APP_VERSION, mq_login_token=None):
selected_domain, verify = self.domain_and_verify(environment_or_domain)
env_config = self._config_for_environment(environment_or_domain)
if domain == Domain.MDS_WEB:
env_config = self._config_for_environment(environment_or_domain)
selected_domain = env_config[domain]
self.mq_login_token = mq_login_token
GsSession.__init__(self, selected_domain, environment_or_domain, api_version=api_version,
14 changes: 14 additions & 0 deletions gs_quant/target/workflow_quote.py
@@ -77,6 +77,15 @@ class OverlayType(EnumBase, Enum):
_None = 'None'


class PriceFormat(EnumBase, Enum):

"""display unit for price"""

Absolute = 'Absolute'
Relative = 'Relative'
Cents = 'Cents'


@dataclass
class HedgeTypes(Base):
pass
@@ -91,6 +100,10 @@ class CustomDeltaHedge(HedgeTypes):
name: Optional[str] = field(default=None, metadata=name_metadata)


class GenericResponse(DictBase):
pass


@handle_camel_case_args
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass(unsafe_hash=True, repr=False)
@@ -233,6 +246,7 @@ class VisualStructuringReport(QuoteReport):
asset_class: Optional[str] = field(default=None, metadata=field_metadata)
hedge_instruction: Optional[HedgeTypes] = field(default=None, metadata=field_metadata)
sales_premium_adjustment: Optional[SalesPremiumAdjustment] = field(default=None, metadata=field_metadata)
price_format: Optional[PriceFormat] = field(default=None, metadata=field_metadata)
name: Optional[str] = field(default=None, metadata=name_metadata)


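A short sketch of the new PriceFormat enum on a report object (assuming, as in the snippet above, that the remaining VisualStructuringReport fields are optional and default to None; the asset_class value is illustrative):

from gs_quant.target.workflow_quote import PriceFormat, VisualStructuringReport

report = VisualStructuringReport(asset_class='Equity',            # illustrative value
                                 price_format=PriceFormat.Cents)  # enum added in this commit
print(report.price_format.value)  # 'Cents'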
