boosted.api.api_client

# Copyright (C) 2020 Gradient Boosted Investments, Inc. - All Rights Reserved

import base64
import csv
import datetime
import functools
import io
import itertools
import json
import logging
import math
import mimetypes
import os
import sys
import tempfile
import time
from datetime import date, timedelta
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from urllib import parse

import numpy as np
import pandas
import pandas as pd
import requests
from dateutil import parser

import boosted.api.graphql_queries as graphql_queries
from boosted.api.api_type import (
    BoostedAPIException,
    BoostedDate,
    ChunkStatus,
    ColumnSubRole,
    DataAddType,
    DataSetConfig,
    DataSetType,
    DateIdentCountryCurrency,
    GbiIdSecurity,
    GbiIdTickerISIN,
    HedgeExperiment,
    HedgeExperimentDetails,
    HedgeExperimentScenario,
    Language,
    NewsHorizon,
    PortfolioSettings,
    Status,
    ThemeUniverse,
    hedge_experiment_type,
)
from boosted.api.api_util import (
    get_valid_iso_dates,
    convert_date,
    get_date_range,
    infer_dataset_schema,
    protoCubeJsonDataToDataFrame,
    to_camel_case,
    validate_start_and_end_dates,
)

logger = logging.getLogger("boosted.api.client")
logging.basicConfig()

g_boosted_api_url = "https://insights.boosted.ai"
g_boosted_api_url_dev = "https://insights-dev.boosted.ai"
WATCHLIST_ROUTE_PREFIX = "/api/dal/watchlist"
ROUTE_PREFIX = WATCHLIST_ROUTE_PREFIX
DAL_WATCHLIST_ROUTE = "/api/v0/watchlist"
DAL_SECURITIES_ROUTE = "/api/v0/securities"
DAL_PA_ROUTE = "/api/v0/portfolio-analysis"
PORTFOLIO_GROUP_ROUTE = "/api/v0/portfolio-group"

RISK_FACTOR = "risk-factor"
RISK_FACTOR_V2 = "risk-factor-v2"
RISK_FACTOR_COLUMNS = [
    "depth",
    "identifier",
    "stock_count",
    "volatility",
    "exposure",
    "rating",
    "rating_delta",
]


class BoostedClient:
    def __init__(
        self, api_key, override_uri=None, debug=False, proxy=None, disable_verify_ssl=False
    ):
        """
        Parameters
        ----------
        api_key: str
            Your API key provided by the Boosted application.  See your profile
            to generate a new key.
        override_uri: str
            Overrides the default API endpoint.  Mainly useful for testing
            against a non-production deployment.
        debug: bool
            If True, sets the client logger to DEBUG level.
        proxy: str
            Your organization may require the use of a proxy for access.
            The address of an HTTPS proxy in the format of <address>:<port>.
            Examples are "123.456.789:123" or "my.proxy.com:123".
            Do not prepend with "https://".
        disable_verify_ssl: bool
            Your networking setup may be behind a firewall which performs SSL
            inspection.  Either set the REQUESTS_CA_BUNDLE environment variable
            to point to the location of a custom certificate bundle, or set this
            parameter to True to disable SSL verification as a workaround.
        """
        if override_uri is None:
            self.base_uri = g_boosted_api_url
        else:
            self.base_uri = override_uri
        self.api_key = api_key
        self.debug = debug
        self._request_params: Dict = {}
        if debug:
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)
        if proxy is not None:
            self._request_params["proxies"] = {"https": proxy}
        if disable_verify_ssl:
            self._request_params["verify"] = False
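
    # A minimal usage sketch (illustrative only; the key and proxy below are
    # placeholders, not working credentials):
    #
    #   client = BoostedClient("YOUR_API_KEY", proxy="my.proxy.com:123")
    #   dev_client = BoostedClient("YOUR_API_KEY", override_uri=g_boosted_api_url_dev)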

    def __print_json_info(self, json_data, isInference=False):
        if "warnings" in json_data.keys():
            for warning in json_data["warnings"]:
                logger.warning("  {0}".format(warning))
        if "errors" in json_data.keys() and json_data["errors"]:
            # Log every error before reporting failure, not just the first.
            for error in json_data["errors"]:
                logger.error("  {0}".format(error))
            return Status.FAIL

        if "result" in json_data.keys():
            results_data = json_data["result"]
            if isInference:
                if "inferenceResultsUrl" in results_data.keys():
                    res_url = parse.urlparse(results_data["inferenceResultsUrl"])
                    logger.debug(res_url)
                    logger.info("Inference started.")
            if "updateCount" in results_data.keys():
                logger.info("Updated {0} rows.".format(results_data["updateCount"]))
            if "createCount" in results_data.keys():
                logger.info("Created {0} rows.".format(results_data["createCount"]))
            return Status.SUCCESS

    def __to_date_obj(self, dt):
        if isinstance(dt, datetime.datetime):
            dt = dt.date()
        elif isinstance(dt, datetime.date):
            return dt
        elif isinstance(dt, str):
            try:
                dt = parser.parse(dt).date()
            except ValueError:
                raise ValueError('dt: "' + dt + '" is not a valid date.')
        return dt

    def __iso_format(self, dt):
        date = self.__to_date_obj(dt)
        if date is not None:
            date = date.isoformat()
        return date

    def _check_status_code(self, response, isInference=False):
        has_json = False
        try:
            logger.debug(response.headers)
            if "Content-Type" in response.headers:
                if response.headers["Content-Type"].startswith("application/json"):
                    json_data = response.json()
                    has_json = True
            else:
                has_json = False
        except json.JSONDecodeError:
            logger.error("ERROR: response has no JSON payload.")
        if response.status_code == 200 or response.status_code == 202:
            if has_json:
                self.__print_json_info(json_data, isInference)
            else:
                pass
            return Status.SUCCESS
        if response.status_code == 404:
            if has_json:
                self.__print_json_info(json_data, isInference)
            raise BoostedAPIException(
                'Server "{0}" not reachable.  Code {1}.'.format(
                    self.base_uri, response.status_code
                ),
                data=response,
            )
        if response.status_code == 400:
            if has_json:
                self.__print_json_info(json_data, isInference)
            if isInference:
                return Status.FAIL
            else:
                raise BoostedAPIException("Error, bad request.  Check the dataset ID.", response)
        if response.status_code == 401:
            if has_json:
                self.__print_json_info(json_data, isInference)
            raise BoostedAPIException("Authorization error.", response)
        else:
            if has_json:
                self.__print_json_info(json_data, isInference)
            raise BoostedAPIException(
                "Error in API response.  Status code={0} {1}\n{2}".format(
                    response.status_code, response.reason, response.headers
                ),
                response,
            )

    def _try_extract_error_code(self, result):
        logger.info(result.headers)
        if "Content-Type" in result.headers:
            if result.headers["Content-Type"].startswith("application/json"):
                if "errors" in result.json():
                    return result.json()["errors"]
            if result.headers["Content-Type"].startswith("text/plain"):
                return result.text
        return str(result.reason)

    def _check_ok_or_err_with_msg(self, res, potential_error_msg: str):
        if not res.ok:
            error = self._try_extract_error_code(res)
            logger.error(error)
            raise BoostedAPIException(f"{potential_error_msg}: {error}")

    def _get_portfolio_rebalance_from_periods(
        self, portfolio_id: str, rel_periods: List[str]
    ) -> List[datetime.date]:
        """
        Returns a list of rebalance dates for a portfolio given a list of
        relative periods of format '1D', '1W', '3M', etc.
        """
        resp = self._get_graphql(
            query=graphql_queries.GET_PORTFOLIO_RELATIVE_DATES_QUERY,
            variables={"portfolioId": portfolio_id, "relativePeriods": rel_periods},
        )
        dates = resp["data"]["portfolio"]["relativeDates"]
        return [datetime.datetime.strptime(d["date"], "%Y-%m-%d").date() for d in dates]
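
    # For example, assuming a valid portfolio ID, '1M' and '3M' would resolve
    # to the portfolio's actual rebalance dates (return values shown here are
    # hypothetical):
    #
    #   client._get_portfolio_rebalance_from_periods("my-portfolio-id", ["1M", "3M"])
    #   # -> [datetime.date(2023, 11, 1), datetime.date(2023, 9, 1)]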

    def translate_text(self, language: Optional[Union[Language, str]], text: str) -> str:
        if not language or language == Language.ENGLISH:
            # By default, do not translate English
            return text

        params = {"text": text, "langCode": language}
        url = self.base_uri + "/api/translate/translate-text"
        headers = {"Authorization": "ApiKey " + self.api_key}
        logger.info("Translating text...")
        res = requests.post(url, json=params, headers=headers, **self._request_params)
        try:
            result = res.json()["translatedText"]
        except Exception:
            raise BoostedAPIException("Error translating text")
        return result
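
    # Sketch of translate_text usage.  This assumes the Language enum defines a
    # member for the target language; the text shown is made up:
    #
    #   translated = client.translate_text(Language.SPANISH, "Strong quarterly earnings.")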

    def query_dataset(self, dataset_id):
        url = self.base_uri + "/api/datasets/{0}".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        res = requests.get(url, headers=headers, **self._request_params)
        if res.ok:
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))

    def query_namespace_dataset_id(self, namespace, data_type):
        url = self.base_uri + f"/api/custom-security-dataset/{namespace}/{data_type}"
        headers = {"Authorization": "ApiKey " + self.api_key}
        res = requests.get(url, headers=headers, **self._request_params)
        if res.ok:
            return res.json()["result"]["id"]
        else:
            if res.status_code != 404:
                error_msg = self._try_extract_error_code(res)
                logger.error(error_msg)
                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
            else:
                return None

    def export_global_data(
        self,
        dataset_id,
        start=(datetime.date.today() - timedelta(days=365 * 25)),
        end=datetime.date.today(),
        timeout=600,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
            )
        return self.export_data(dataset_id, start, end, timeout)

    def export_independent_data(
        self,
        dataset_id,
        start=(datetime.date.today() - timedelta(days=365 * 25)),
        end=datetime.date.today(),
        timeout=600,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}"
                f" - Expected {DataSetType.STRATEGY}"
            )
        return self.export_data(dataset_id, start, end, timeout)

    def export_dependent_data(
        self,
        dataset_id,
        start=None,
        end=None,
        timeout=600,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.STOCK:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
            )

        valid_date_range = self.getDatasetDates(dataset_id)
        validStart = valid_date_range["validFrom"]
        validEnd = valid_date_range["validTo"]

        if start is None:
            logger.info("Since no start date provided, starting from {0}.".format(validStart))
            start = validStart
        if end is None:
            logger.info("Since no end date provided, ending at {0}.".format(validEnd))
            end = validEnd
        start = self.__to_date_obj(start)
        end = self.__to_date_obj(end)
        if start < validStart:
            logger.info("Data does not exist before {0}.".format(validStart))
            logger.info("Starting from {0}.".format(validStart))
            start = validStart
        if end > validEnd:
            logger.info("Data does not exist after {0}.".format(validEnd))
            logger.info("Ending at {0}.".format(validEnd))
            end = validEnd
        validate_start_and_end_dates(start, end)

        logger.info("Data exists from {0} to {1}.".format(start, end))
        request_url = "/api/datasets/" + dataset_id + "/export-data"
        headers = {"Authorization": "ApiKey " + self.api_key}

        data_chunks = []
        chunk_size_days = 90
        while start <= end:
            chunk_end = start + timedelta(days=chunk_size_days)
            if chunk_end > end:
                chunk_end = end

            logger.info("Requesting start={0} end={1}.".format(start, chunk_end))
            params = {"start": self.__iso_format(start), "end": self.__iso_format(chunk_end)}
            logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))

            res = requests.get(
                self.base_uri + request_url,
                headers=headers,
                params=params,
                timeout=timeout,
                **self._request_params,
            )

            if res.ok:
                buf = io.StringIO(res.text)
                df = pd.read_csv(buf, index_col=0, parse_dates=True)
                if "price" in df.columns:
                    df = df.drop("price", axis=1)
                data_chunks.append(df)
            else:
                error_msg = self._try_extract_error_code(res)
                logger.error(error_msg)
                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))

            start = start + timedelta(days=chunk_size_days + 1)

        return pd.concat(data_chunks)

    def export_custom_security_data(
        self,
        dataset_id,
        start=(date.today() - timedelta(days=365 * 25)),
        end=date.today(),
        timeout=600,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}"
                f" - Expected {DataSetType.SECURITIES_DAILY}"
            )
        return self.export_data(dataset_id, start, end, timeout)

    def export_data(
        self,
        dataset_id,
        start=(datetime.date.today() - timedelta(days=365 * 25)),
        end=datetime.date.today(),
        timeout=600,
    ):
        logger.info("Requesting start={0} end={1}.".format(start, end))
        request_url = "/api/datasets/" + dataset_id + "/export-data"
        headers = {"Authorization": "ApiKey " + self.api_key}
        start = self.__iso_format(start)
        end = self.__iso_format(end)
        params = {"start": start, "end": end}
        logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))
        res = requests.get(
            self.base_uri + request_url,
            headers=headers,
            params=params,
            timeout=timeout,
            **self._request_params,
        )
        if res.ok or self._check_status_code(res):
            buf = io.StringIO(res.text)
            df = pd.read_csv(buf, index_col=0, parse_dates=True)
            if "price" in df.columns:
                df = df.drop("price", axis=1)
            return df
        else:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
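
    # Sketch: pull two years of a dataset into a DataFrame (the dataset ID is
    # a placeholder):
    #
    #   df = client.export_data(
    #       "my-dataset-id",
    #       start=datetime.date(2021, 1, 1),
    #       end=datetime.date(2022, 12, 31),
    #   )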

    def _get_inference(self, model_id, inference_date=datetime.date.today()):
        request_url = "/api/models/" + model_id + "/inference-results"
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {}
        params["date"] = self.__iso_format(inference_date)
        logger.debug(request_url + ", " + str(headers) + ", " + str(params))
        res = requests.get(
            self.base_uri + request_url, headers=headers, params=params, **self._request_params
        )
        status = self._check_status_code(res, isInference=True)
        if status == Status.SUCCESS:
            return res, status
        else:
            return None, status

    def get_inference(
        self, model_id, inference_date=datetime.date.today(), block=False, timeout_minutes=30
    ):
        start_time = datetime.datetime.now()
        while True:
            for numRetries in range(3):
                res, status = self._get_inference(model_id, inference_date)
                if res is not None:
                    # Got a response; stop retrying.
                    break
                if status == Status.FAIL:
                    return Status.FAIL
                logger.info("Retrying...")
            if res is None:
                logger.error("Max retries reached.  Request failed.")
                return None

            json_data = res.json()
            if "result" in json_data.keys():
                if json_data["result"]["status"] == "RUNNING":
                    still_running = True
                    if not block:
                        logger.warning("Inference job is still running.")
                        return None
                    else:
                        logger.info(
                            "Inference job is still running.  Time elapsed={0}.".format(
                                datetime.datetime.now() - start_time
                            )
                        )
                        time.sleep(10)
                else:
                    still_running = False

                if not still_running and json_data["result"]["status"] == "COMPLETE":
                    signals_csv = json_data["result"]["signals"]
                    logger.info(json_data["result"])
                    if self._check_status_code(res, isInference=True):
                        logger.info(
                            "Total run time = {0}.".format(datetime.datetime.now() - start_time)
                        )
                        return signals_csv
            else:
                if "errors" in json_data.keys():
                    logger.error(json_data["errors"])
                else:
                    logger.error("Error getting inference for date {0}.".format(inference_date))
                return None
            if (datetime.datetime.now() - start_time).total_seconds() / 60.0 > timeout_minutes:
                logger.error("Timeout waiting for job completion.")
                return None

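    # Sketch: block until inference finishes, then parse the returned signals,
    # assuming the payload is CSV text (the model ID is a placeholder):
    #
    #   signals = client.get_inference("my-model-id", block=True, timeout_minutes=60)
    #   if signals not in (None, Status.FAIL):
    #       df = pd.read_csv(io.StringIO(signals))
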
    def createDataset(self, schema):
        request_url = "/api/datasets"
        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
        s = json.dumps(schema)
        logger.info("Creating dataset with schema " + s)
        res = requests.post(
            self.base_uri + request_url, data=s, headers=headers, **self._request_params
        )
        if res.ok:
            return res.json()["result"]
        else:
            raise BoostedAPIException("Dataset creation failed.")

    def create_custom_namespace_dataset(self, namespace, schema):
        request_url = f"/api/custom-security-dataset/{namespace}"
        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
        s = json.dumps(schema)
        logger.info("Creating dataset with schema " + s)
        res = requests.post(
            self.base_uri + request_url, data=s, headers=headers, **self._request_params
        )
        if res.ok:
            return res.json()["result"]
        else:
            raise BoostedAPIException("Dataset creation failed.")

    def getUniverse(self, modelId, date=None):
        if date is not None:
            url = "/api/models/{0}/universe/{1}".format(modelId, self.__iso_format(date))
            logger.info("Getting universe for date: {0}.".format(date))
        else:
            url = "/api/models/{0}/universe/".format(modelId)
        headers = {"Authorization": "ApiKey " + self.api_key}
        res = requests.get(self.base_uri + url, headers=headers, **self._request_params)
        if res.ok:
            buf = io.StringIO(res.text)
            df = pd.read_csv(buf, index_col=0, parse_dates=True)
            return df
        else:
            error = self._try_extract_error_code(res)
            logger.error(
                "There was a problem getting this universe or model ID: {0}.".format(error)
            )
            raise BoostedAPIException("Failed to get universe: {0}".format(error))

    def add_custom_security_namespace_members(
        self, namespace, members: Union[pandas.DataFrame, str]
    ) -> Tuple[pandas.DataFrame, str]:
        url = self.base_uri + "/api/synthetic-datasets/{0}/generate".format(namespace)
        headers = {"Authorization": "ApiKey " + self.api_key}
        headers["Content-Type"] = "application/json"
        logger.info("Adding custom security namespace for namespace: {0}".format(namespace))
        strbuf = None
        if isinstance(members, pandas.DataFrame):
            df = members
            df_canon = df.rename(columns={_: to_camel_case(_) for _ in df.columns})
            canon_cols = ["Currency", "Symbol", "Country", "Name"]
            if set(canon_cols).difference(df_canon.columns):
                raise BoostedAPIException(f"Expected columns: {canon_cols}")
            df_canon = df_canon.loc[:, canon_cols]
            buf = io.StringIO()
            df_canon.to_json(buf, orient="records")
            strbuf = buf.getvalue()
        elif isinstance(members, str):
            strbuf = members
        else:
            raise BoostedAPIException(f"Unsupported members argument type: {type(members)}")
        res = requests.post(url, data=strbuf, headers=headers, **self._request_params)
        if res.ok:
            res_obj = res.json()
            res_df = pandas.Series(res_obj["generatedISIN"]).to_frame()
            res_df.index.name = "Symbol"
            res_df.columns = ["ISIN"]
            logger.info("Add to custom security namespace successful.")
            if "warnings" in res_obj:
                logger.info("Warnings: {0}.".format(res_obj["warnings"]))
                return res_df, res_obj["warnings"]
            else:
                return res_df, "No warnings."
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException(
                "Failed to add custom security namespace members: {0}.".format(error_msg)
            )
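
    # Sketch of the members frame this method expects.  The column names shown
    # are the canonical ones checked after the to_camel_case rename (this
    # assumes to_camel_case leaves them unchanged; the values are made up):
    #
    #   members = pandas.DataFrame(
    #       [{"Symbol": "ACME", "Name": "Acme Corp", "Country": "USA", "Currency": "USD"}]
    #   )
    #   isin_df, warn = client.add_custom_security_namespace_members("my-namespace", members)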

    def updateUniverse(self, modelId, universe_df, date=datetime.date.today() + timedelta(1)):
        date = self.__iso_format(date)
        url = self.base_uri + "/api/models/{0}/universe/{1}".format(modelId, date)
        headers = {"Authorization": "ApiKey " + self.api_key}
        logger.info("Updating universe for date {0}.".format(date))
        if isinstance(universe_df, pd.core.frame.DataFrame):
            buf = io.StringIO()
            universe_df.to_csv(buf)
            csv_text = buf.getvalue()
        elif isinstance(universe_df, str):
            csv_text = universe_df
        else:
            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
        files_req = {"universe": ("uploaded_universe.csv", csv_text, "text/csv")}
        res = requests.post(url, files=files_req, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Universe update successful.")
            if "warnings" in res.json():
                logger.info("Warnings: {0}.".format(res.json()["warnings"]))
                return res.json()["warnings"]
            else:
                return "No warnings."
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to update universe: {0}.".format(error_msg))
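
    # Sketch: upload a universe CSV for tomorrow's date, the default (model ID
    # and file name are placeholders):
    #
    #   with open("my_universe.csv") as f:
    #       client.updateUniverse("my-model-id", f.read())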

    def create_universe(
        self, universe: Union[pd.DataFrame, str], name: str, description: str
    ) -> List[str]:
        PRESENT = "PRESENT"
        ANY = "ANY"
        EARLIEST_DATE = "1900-01-01"
        LATEST_DATE = "4000-01-01"

        if isinstance(universe, (str, bytes, os.PathLike)):
            universe = pd.read_csv(universe)

        universe.columns = universe.columns.str.lower()

        # Clients are free to leave out data.  Fill in some defaults here.
        if "from" not in universe.columns:
            universe["from"] = EARLIEST_DATE
        if "to" not in universe.columns:
            universe["to"] = LATEST_DATE
        if "currency" not in universe.columns:
            universe["currency"] = ANY
        if "country" not in universe.columns:
            universe["country"] = ANY
        if "isin" not in universe.columns:
            universe["isin"] = None
        if "symbol" not in universe.columns:
            universe["symbol"] = None

        # Rename to prevent conflicts with Python keywords.
        universe.rename(columns={"from": "from_date", "to": "to_date"}, inplace=True)

        universe = universe.replace({np.nan: None})
        security_country_currency_date_list = []
        for i, r in enumerate(universe.itertuples()):
            id_type = ColumnSubRole.ISIN
            identifier = r.isin

            if identifier is None and r.symbol is not None:
                id_type = ColumnSubRole.SYMBOL
                identifier = str(r.symbol)

            # If the identifier is still None, the row has neither an ISIN nor
            # a symbol, so raise an error.
            if identifier is None:
                raise BoostedAPIException(
                    f"Missing identifier in universe row {i + 1}:"
                    " each row must contain an ISIN or a Symbol"
                )

            security_country_currency_date_list.append(
                DateIdentCountryCurrency(
                    date=r.from_date or EARLIEST_DATE,
                    identifier=identifier,
                    country=r.country or ANY,
                    currency=r.currency or ANY,
                    id_type=id_type,
                )
            )

        gbi_id_objs = self.getGbiIdFromIdentCountryCurrencyDate(security_country_currency_date_list)

        security_list = []
        for i, r in enumerate(universe.itertuples()):
            # A None here means we failed to map the row to a GBI ID.
            if gbi_id_objs[i] is None:
                raise BoostedAPIException(f"Unable to map row: {tuple(r)}")

            security_list.append(
                {
                    "stockId": gbi_id_objs[i].gbi_id,
                    "fromZ": r.from_date or EARLIEST_DATE,
                    "toZ": LATEST_DATE if r.to_date in (PRESENT, None) else r.to_date,
                    "removal": False,
                    "source": "UPLOAD",
                }
            )

        url = self.base_uri + "/api/template-universe/save"
        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
        req = {"name": name, "description": description, "modificationDaos": security_list}

        res = requests.post(url, json=req, headers=headers, **self._request_params)
        self._check_ok_or_err_with_msg(res, "Failed to create universe")

        if "warnings" in res.json():
            logger.info("Warnings: {0}.".format(res.json()["warnings"]))
            return res.json()["warnings"].splitlines()
        else:
            return []
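
    # Sketch of a create_universe input.  Every column except an ISIN or
    # Symbol identifier is optional and defaulted as above (values are made up):
    #
    #   universe = pd.DataFrame(
    #       [
    #           {"isin": "US0000000000", "from": "2020-01-01", "to": "PRESENT"},
    #           {"symbol": "ACME", "country": "USA", "currency": "USD"},
    #       ]
    #   )
    #   warnings = client.create_universe(universe, "My Universe", "Created via the API")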

    def validate_dataframe(self, df):
        if not isinstance(df, pd.core.frame.DataFrame):
            logger.error("Dataset must be of type Dataframe.")
            return False
        if not isinstance(df.index, pd.core.indexes.datetimes.DatetimeIndex):
            logger.error("Index must be DatetimeIndex.")
            return False
        if len(df.columns) == 0:
            logger.error("No feature columns exist.")
            return False
        if len(df) == 0:
            logger.error("No rows exist.")
            return False
        return True
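
    # For example, a frame that passes validation: a DatetimeIndex with at
    # least one feature column and at least one row:
    #
    #   df = pd.DataFrame(
    #       {"my_feature": [1.0, 2.0]},
    #       index=pd.to_datetime(["2023-01-02", "2023-01-03"]),
    #   )
    #   assert client.validate_dataframe(df)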

    def get_dataset_schema(self, dataset_id):
        url = self.base_uri + "/api/datasets/{0}/schema".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        res = requests.get(url, headers=headers, **self._request_params)
        if res.ok:
            json_schema = res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
        return DataSetConfig.fromDict(json_schema["result"])

    def add_custom_security_daily_dataset(
        self, namespace, dataset, schema=None, timeout=600, block=True
    ):
        result = self.add_custom_security_daily_dataset_with_warnings(
            namespace, dataset, schema, timeout, block
        )
        return result["dataset_id"]

    def add_custom_security_daily_dataset_with_warnings(
        self,
        namespace,
        dataset,
        schema=None,
        timeout=600,
        block=True,
        no_exception_on_chunk_error=False,
    ):
        dataset_type = DataSetType.SECURITIES_DAILY
        dsid = self.query_namespace_dataset_id(namespace, dataset_type)

        if not self.validate_dataframe(dataset):
            logger.error("dataset failed validation.")
            return None

        if dsid is None:
            # Create the dataset if it does not exist.
            schema = infer_dataset_schema(
                "custom_security_daily", dataset, dataset_type, infer_from_column_names=True
            )
            dsid = self.create_custom_namespace_dataset(namespace, schema.toDict())
            data_type = DataAddType.CREATION
            logger.info("Created dataset with ID = {0}.".format(dsid))
        elif schema is not None:
            raise ValueError(
                f"Dataset schema already exists for namespace={namespace}, type={dataset_type}"
                ", cannot create another!"
            )
        else:
            data_type = DataAddType.HISTORICAL

        logger.info("Uploading to dataset with ID = {0}...".format(dsid))
        result = self.add_custom_security_daily_data(
            dsid,
            dataset,
            timeout,
            block,
            data_type=data_type,
            no_exception_on_chunk_error=no_exception_on_chunk_error,
        )
        return {
            "namespace": namespace,
            "dataset_id": dsid,
            "warnings": result["warnings"],
            "errors": result["errors"],
        }

    def add_custom_security_daily_data(
        self,
        dataset_id,
        csv_data,
        timeout=600,
        block=True,
        data_type=DataAddType.HISTORICAL,
        no_exception_on_chunk_error=False,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}"
                f" - Expected {DataSetType.SECURITIES_DAILY}"
            )
        warnings, errors = self.setup_chunk_and_upload_data(
            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
        )
        if len(warnings) > 0:
            logger.warning(
                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
            )
        if len(errors) > 0:
            raise BoostedAPIException(
                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
                + "\n".join(errors)
            )
        return {"warnings": warnings, "errors": errors}

    def add_dependent_dataset(
        self, dataset, datasetName="DependentDataset", schema=None, timeout=600, block=True
    ):
        result = self.add_dependent_dataset_with_warnings(
            dataset, datasetName, schema, timeout, block
        )
        return result["dataset_id"]

    def add_dependent_dataset_with_warnings(
        self,
        dataset,
        datasetName="DependentDataset",
        schema=None,
        timeout=600,
        block=True,
        no_exception_on_chunk_error=False,
    ):
        if not self.validate_dataframe(dataset):
            logger.error("dataset failed validation.")
            return None
        if schema is None:
            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STOCK)
        dsid = self.createDataset(schema.toDict())
        logger.info("Creating dataset with ID = {0}.".format(dsid))
        result = self.add_dependent_data(
            dsid,
            dataset,
            timeout,
            block,
            data_type=DataAddType.CREATION,
            no_exception_on_chunk_error=no_exception_on_chunk_error,
        )
        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
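
    # Sketch: upload a stock-level (dependent) dataset with an inferred schema.
    # Here `df` is assumed to be a DataFrame that passes validate_dataframe,
    # with security identifier columns appropriate to your data:
    #
    #   dataset_id = client.add_dependent_dataset(df, datasetName="MyStockData")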

    def add_independent_dataset(
        self, dataset, datasetName="IndependentDataset", schema=None, timeout=600, block=True
    ):
        result = self.add_independent_dataset_with_warnings(
            dataset, datasetName, schema, timeout, block
        )
        return result["dataset_id"]

    def add_independent_dataset_with_warnings(
        self,
        dataset,
        datasetName="IndependentDataset",
        schema=None,
        timeout=600,
        block=True,
        no_exception_on_chunk_error=False,
    ):
        if not self.validate_dataframe(dataset):
            logger.error("dataset failed validation.")
            return None
        if schema is None:
            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STRATEGY)
        schemaDict = schema.toDict()
        if "configurationDataJson" not in schemaDict:
            schemaDict["configurationDataJson"] = "{}"
        dsid = self.createDataset(schemaDict)
        logger.info("Creating dataset with ID = {0}.".format(dsid))
        result = self.add_independent_data(
            dsid,
            dataset,
            timeout,
            block,
            data_type=DataAddType.CREATION,
            no_exception_on_chunk_error=no_exception_on_chunk_error,
        )
        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}

    def add_global_dataset(
        self, dataset, datasetName="GlobalDataset", schema=None, timeout=600, block=True
    ):
        result = self.add_global_dataset_with_warnings(dataset, datasetName, schema, timeout, block)
        return result["dataset_id"]

    def add_global_dataset_with_warnings(
        self,
        dataset,
        datasetName="GlobalDataset",
        schema=None,
        timeout=600,
        block=True,
        no_exception_on_chunk_error=False,
    ):
        if not self.validate_dataframe(dataset):
            logger.error("dataset failed validation.")
            return None
        if schema is None:
            schema = infer_dataset_schema(datasetName, dataset, DataSetType.GLOBAL)
        dsid = self.createDataset(schema.toDict())
        logger.info("Creating dataset with ID = {0}.".format(dsid))
        result = self.add_global_data(
            dsid,
            dataset,
            timeout,
            block,
            data_type=DataAddType.CREATION,
            no_exception_on_chunk_error=no_exception_on_chunk_error,
        )
        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}

    def add_independent_data(
        self,
        dataset_id,
        csv_data,
        timeout=600,
        block=True,
        data_type=DataAddType.HISTORICAL,
        no_exception_on_chunk_error=False,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}"
                f" - Expected {DataSetType.STRATEGY}"
            )
        warnings, errors = self.setup_chunk_and_upload_data(
            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
        )
        if len(warnings) > 0:
            logger.warning(
                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
            )
        if len(errors) > 0:
            raise BoostedAPIException(
                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
                + "\n".join(errors)
            )
        return {"warnings": warnings, "errors": errors}

    def add_dependent_data(
        self,
        dataset_id,
        csv_data,
        timeout=600,
        block=True,
        data_type=DataAddType.HISTORICAL,
        no_exception_on_chunk_error=False,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.STOCK:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
            )
        warnings, errors = self.setup_chunk_and_upload_data(
            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
        )
        if len(warnings) > 0:
            logger.warning(
                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
            )
        if len(errors) > 0:
            raise BoostedAPIException(
                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
                + "\n".join(errors)
            )
        return {"warnings": warnings, "errors": errors}

    def add_global_data(
        self,
        dataset_id,
        csv_data,
        timeout=600,
        block=True,
        data_type=DataAddType.HISTORICAL,
        no_exception_on_chunk_error=False,
    ):
        query_info = self.query_dataset(dataset_id)
        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
            raise BoostedAPIException(
                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
            )
        warnings, errors = self.setup_chunk_and_upload_data(
            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
        )
        if len(warnings) > 0:
            logger.warning(
                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
            )
        if len(errors) > 0:
            raise BoostedAPIException(
                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
                + "\n".join(errors)
            )
        return {"warnings": warnings, "errors": errors}

    def get_csv_buffer(self):
        return io.StringIO()

    def start_chunked_upload(self, dataset_id):
        url = self.base_uri + "/api/datasets/{0}/start-chunked-upload".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        res = requests.post(url, headers=headers, **self._request_params)
        if res.ok:
            return res.json()["result"]
        else:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException(
                "Failed to obtain dataset lock for upload: {0}.".format(error_msg)
            )

    def abort_chunked_upload(self, dataset_id, chunk_id):
        url = self.base_uri + "/api/datasets/{0}/abort-chunked-upload".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {"uploadGroupId": chunk_id}
        res = requests.post(url, headers=headers, **self._request_params, params=params)
        if not res.ok:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException(
                "Failed to abort dataset lock during error: {0}.".format(error_msg)
            )

    def check_dataset_ingestion_completion(self, dataset_id, chunk_id, start_time):
        url = self.base_uri + "/api/datasets/{0}/upload-chunk-status".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {"uploadGroupId": chunk_id}
        res = requests.get(url, headers=headers, **self._request_params, params=params)
        res = res.json()

        finished = False
        warnings = []
        errors = []

        if isinstance(res, dict):
            dataset_status = res["datasetStatus"]
            chunk_status = res["chunkStatus"]
            if chunk_status != ChunkStatus.PROCESSING.value:
                finished = True
                errors = res["errors"]
                warnings = res["warnings"]
                successful_rows = res["successfulRows"]
                total_rows = res["totalRows"]
                logger.info(
                    f"Successfully ingested {successful_rows} out of {total_rows} uploaded rows."
                )
                if chunk_status in [
                    ChunkStatus.SUCCESS.value,
                    ChunkStatus.WARNING.value,
                    ChunkStatus.ERROR.value,
                ]:
                    if dataset_status != "AVAILABLE":
                        raise BoostedAPIException(
                            "Dataset was unexpectedly unavailable after chunk upload finished."
                        )
                    else:
                        logger.info("Ingestion complete.  Uploaded data is ready for use.")
                elif chunk_status == ChunkStatus.ABORTED.value:
                    errors.append(
                        "Dataset chunk upload was aborted by server! Upload did not succeed."
                    )
                else:
                    errors.append("Unexpected data ingestion status: {0}.".format(chunk_status))
            else:
                # Only log the elapsed time while the chunk is still processing.
                logger.info(
                    "Data ingestion still running.  Time elapsed={0}.".format(
                        datetime.datetime.now() - start_time
                    )
                )
        else:
            raise BoostedAPIException("Unable to get status of dataset ingestion.")
        return {"finished": finished, "warnings": warnings, "errors": errors}

    def _commit_chunked_upload(self, dataset_id, chunk_id, data_type, block=True, timeout=600):
        url = self.base_uri + "/api/datasets/{0}/commit-chunked-upload".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {
            "uploadGroupId": chunk_id,
            "dataAddType": data_type,
            "sendCompletionEmail": not block,
        }
        res = requests.post(url, headers=headers, **self._request_params, params=params)
        if not res.ok:
            error_msg = self._try_extract_error_code(res)
            logger.error(error_msg)
            raise BoostedAPIException("Failed to commit dataset files: {0}.".format(error_msg))

        if block:
            start_time = datetime.datetime.now()
            # Keep waiting until the chunk is no longer in the PROCESSING state...
            while True:
                result = self.check_dataset_ingestion_completion(dataset_id, chunk_id, start_time)
                if result["finished"]:
                    break

                if (datetime.datetime.now() - start_time).total_seconds() > timeout:
                    err_str = (
                        f"Timeout waiting for commit of dataset: {dataset_id} | chunk: {chunk_id}"
                    )
                    logger.error(err_str)
                    return [], [err_str]

                time.sleep(10)
            return result["warnings"], result["errors"]
        else:
            return [], []

    def setup_chunk_and_upload_data(
        self,
        dataset_id,
        csv_data,
        data_type,
        timeout=600,
        block=True,
        no_exception_on_chunk_error=False,
    ):
        chunk_id = self.start_chunked_upload(dataset_id)
        logger.info("Obtained lock on dataset for upload: " + chunk_id)
        try:
            warnings, errors = self.chunk_and_upload_data(
                dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
            )
            commit_warnings, commit_errors = self._commit_chunked_upload(
                dataset_id, chunk_id, data_type, block, timeout
            )
            return warnings + commit_warnings, errors + commit_errors
        except Exception:
            self.abort_chunked_upload(dataset_id, chunk_id)
            raise

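    # For reference, the chunked-upload lifecycle driven by the method above:
    # start_chunked_upload acquires a lock on the dataset and returns an
    # upload-group (chunk) ID, chunk_and_upload_data streams the CSV in pieces
    # against that ID, and _commit_chunked_upload (or abort_chunked_upload on
    # failure) finalizes or releases the lock.  A manual sketch of the same
    # flow, with `dataset_id` and `df` as placeholders:
    #
    #   chunk_id = client.start_chunked_upload(dataset_id)
    #   try:
    #       client.chunk_and_upload_data(dataset_id, chunk_id, df)
    #       client._commit_chunked_upload(dataset_id, chunk_id, DataAddType.HISTORICAL)
    #   except Exception:
    #       client.abort_chunked_upload(dataset_id, chunk_id)
    #       raise
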
    def chunk_and_upload_data(
        self, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False
    ):
        if isinstance(csv_data, pd.core.frame.DataFrame):
            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")

            warnings = []
            errors = []
            logger.info("Uploading yearly.")
            for t in csv_data.index.to_period("Y").unique():
                if t is pd.NaT:
                    continue

                # Serialize the yearly slice to a CSV string.
                buf = self.get_csv_buffer()
                yearly_csv = csv_data.loc[str(t)]
                yearly_csv.to_csv(buf, header=True)
                raw_csv = buf.getvalue()

                # We are already chunking yearly, but if the CSV still exceeds a
                # healthy limit of 50 MB, the final line of defence is to ignore
                # date boundaries and just chunk the rows.  This is mostly for
                # the Cloudflare upload limit.
                size_lim = 50 * 1000 * 1000
                est_csv_size = sys.getsizeof(raw_csv)
                if est_csv_size > size_lim:
                    del raw_csv, buf
                    logger.info("Yearly data too large for single upload, chunking further...")
                    chunks = []
                    nchunks = math.ceil(est_csv_size / size_lim)
                    rows_per_chunk = math.ceil(len(yearly_csv) / nchunks)
                    for i in range(0, len(yearly_csv), rows_per_chunk):
                        buf = self.get_csv_buffer()
                        split_csv = yearly_csv.iloc[i : i + rows_per_chunk]
                        split_csv.to_csv(buf, header=True)
                        split_csv = buf.getvalue()
                        chunks.append(
                            (
                                "{0}-{1}".format(i + 1, min(len(yearly_csv), i + rows_per_chunk)),
                                split_csv,
                            )
                        )
                else:
                    chunks = [("all", raw_csv)]

                for i, (rows_descriptor, chunk_csv) in enumerate(chunks):
                    chunk_descriptor = "{0} in yearly chunk {1}".format(rows_descriptor, t)
                    logger.info(
                        "Uploading rows: "
                        + chunk_descriptor
                        + " (chunk {0} of {1}):".format(i + 1, len(chunks))
                    )
                    _, new_warnings, new_errors = self.upload_dataset_chunk(
                        chunk_descriptor,
                        dataset_id,
                        chunk_id,
                        chunk_csv,
                        timeout,
                        no_exception_on_chunk_error,
                    )
                    warnings.extend(new_warnings)
                    errors.extend(new_errors)
            return warnings, errors

        elif isinstance(csv_data, str):
            _, warnings, errors = self.upload_dataset_chunk(
                "all data", dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
            )
            return warnings, errors
        else:
            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")

    def upload_dataset_chunk(
        self,
        chunk_descriptor,
        dataset_id,
        chunk_id,
        csv_data,
        timeout=600,
        no_exception_on_chunk_error=False,
    ):
        logger.info("Starting upload: " + chunk_descriptor)
        url = self.base_uri + "/api/datasets/{0}/upload-dataset-chunk".format(dataset_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        files_req = {}
        warnings = []
        errors = []

        # Make the network request.
        target = ("uploaded_data.csv", csv_data, "text/csv")
        files_req["dataFile"] = target
        params = {"uploadGroupId": chunk_id}
        res = requests.post(
            url,
            params=params,
            files=files_req,
            headers=headers,
            timeout=timeout,
            **self._request_params,
        )

        if res.ok:
            logger.info(
                (
                    "Chunk upload completed.  "
                    "Ingestion started.  "
                    "Please wait until the data is in AVAILABLE state."
                )
            )
            if "warnings" in res.json():
                warnings = res.json()["warnings"]
                if len(warnings) > 0:
                    logger.warning("Uploaded chunk encountered data warnings: ")
                for w in warnings:
                    logger.warning(w)
        else:
            reason = "Upload failed: {0}, {1}".format(res.text, res.reason)
            logger.error(reason)
            if no_exception_on_chunk_error:
                errors.append(
                    "Chunk {0} failed: {1}. ".format(chunk_descriptor, reason)
                    + "Your data was only PARTIALLY uploaded. "
                    + "Please reattempt the upload of this chunk."
                )
            else:
                raise BoostedAPIException(reason)

        return res, warnings, errors

    def getAllocationsForDate(self, portfolio_id, date, rollback_to_last_available_date):
        date = self.__iso_format(date)
        endpoint = "latest-allocations" if rollback_to_last_available_date else "allocations"
        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {"date": date}
        logger.info("Retrieving allocations information for date {0}.".format(date))
        res = requests.get(url, params=params, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Allocations retrieval successful.")
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
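
    # Sketch: fetch allocations for a date, rolling back to the most recent
    # available date if the exact one is missing (portfolio ID is a placeholder):
    #
    #   allocs = client.getAllocationsForDate(
    #       "my-portfolio-id", datetime.date(2023, 6, 30), rollback_to_last_available_date=True
    #   )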

    # New API method for fetching data from the portfolio_holdings.pb2 file.
    def getAllocationsForDateV2(self, portfolio_id, date, rollback_to_last_available_date):
        date = self.__iso_format(date)
        endpoint = "latest-allocations-v2" if rollback_to_last_available_date else "allocations-v2"
        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {"date": date}
        logger.info("Retrieving allocations information for date {0}.".format(date))
        res = requests.get(url, params=params, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Allocations retrieval successful.")
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))

    def getAllocationsByDates(self, portfolio_id, dates=None):
        url = self.base_uri + "/api/portfolios/{0}/allocationsByDate".format(portfolio_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        if dates is not None:
            fmt_dates = []
            for d in dates:
                fmt_dates.append(self.__iso_format(d))
            fmt_dates_str = ",".join(fmt_dates)
            params: Dict = {"dates": fmt_dates_str}
            logger.info("Retrieving allocations information for dates {0}.".format(fmt_dates))
        else:
            params = {"dates": None}
            logger.info("Retrieving allocations information for all dates.")
        res = requests.get(url, params=params, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Allocations retrieval successful.")
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))

    def getSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
        date = self.__iso_format(date)
        endpoint = "latest-signals" if rollback_to_last_available_date else "signals"
        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {"date": date}
        logger.info("Retrieving signals information for date {0}.".format(date))
        res = requests.get(url, params=params, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Signals retrieval successful.")
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))

    def getSignalsForAllDates(self, portfolio_id, dates=None):
        url = self.base_uri + "/api/portfolios/{0}/signalsByDate".format(portfolio_id)
        headers = {"Authorization": "ApiKey " + self.api_key}
        params = {}
        if dates is not None:
            fmt_dates = []
            for d in dates:
                fmt_dates.append(self.__iso_format(d))
            fmt_dates_str = ",".join(fmt_dates)
            params = {"dates": fmt_dates_str}
            logger.info("Retrieving signals information for dates {0}.".format(fmt_dates))
        else:
            params = {"dates": None}
            logger.info("Retrieving signals information for all dates.")
        res = requests.get(url, params=params, headers=headers, **self._request_params)
        if res.ok:
            logger.info("Signals retrieval successful.")
            return res.json()
        else:
            error_msg = self._try_extract_error_code(res)
            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))

    def getEquityAccuracy(
        self,
        model_id: str,
        portfolio_id: str,
1356        tickers: List[str],
1357        start_date: Optional[BoostedDate] = None,
1358        end_date: Optional[BoostedDate] = None,
1359    ) -> Dict[str, Dict[str, Any]]:
1360        data: Dict[str, Any] = {}
1361        if start_date is not None:
1362            start_date = convert_date(start_date)
1363            data["startDate"] = start_date.isoformat()
1364        if end_date is not None:
1365            end_date = convert_date(end_date)
1366            data["endDate"] = end_date.isoformat()
1367
1368        if start_date and end_date:
1369            validate_start_and_end_dates(start_date, end_date)
1370
1371        tickers_stream = ",".join(tickers)
1372        data["tickers"] = tickers_stream
1373        data["timestamp"] = time.strftime("%H:%M:%S")
1374        data["shouldRecalc"] = True
1375        url = self.base_uri + f"/api/analysis/equity-accuracy/{model_id}/{portfolio_id}"
1376        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1377
1378        logger.info(
1379            f"Retrieving equity accuracy data for date range {start_date} to {end_date} "
1380            f"for tickers: {tickers}."
1381        )
1382
1383        # Metric tables in the JSON response; each becomes a DataFrame below.
1384        metrics = [
1385            "hit_rate_mean",
1386            "hit_rate_median",
1387            "excess_return_mean",
1388            "excess_return_median",
1389            "return",
1390            "excess_return",
1391        ]
1392
1393        # send the request, retry if failed
1394        MAX_RETRIES = 10  # maximum number of retries before giving up
1395        SLEEP_TIME = 3  # seconds to wait between retries
1396
1397        num_retries = 0
1398        success = False
1399        while not success and num_retries < MAX_RETRIES:
1400            res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
1401            if res.ok:
1402                logger.info("Equity Accuracy Data retrieval successful.")
1403                info = res.json()
1404                success = True
1405            else:
1406                data["shouldRecalc"] = False
1407                num_retries += 1
1408                time.sleep(SLEEP_TIME)
1409
1410        if not success:
1411            raise BoostedAPIException("Failed to retrieve equity accuracy: Request timeout.")
1412
1413        for ticker, accuracy_data in info.items():
1414            for metric in metrics:
1415                metric_matrix = accuracy_data[metric]
1416                if not isinstance(metric_matrix, str):
1417                    # Set the index to the quintile label, and remove it from the data
1418                    index = []
1419                    for row in metric_matrix[1:]:
1420                        index.append(row.pop(0))
1421
1422                    # columns are "1D", "5D", etc.
1423                    df = pd.DataFrame(metric_matrix[1:], columns=metric_matrix[0][1:], index=index)
1424                    accuracy_data[metric] = df
1425        return info
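
    # Illustrative sketch (IDs and tickers are assumptions, reusing the `client`
    # from the earlier sketch): each metric listed above comes back as a
    # DataFrame per ticker, indexed by quintile label with horizon columns
    # such as "1D" and "5D":
    #
    #   acc = client.getEquityAccuracy(
    #       model_id="MODEL_ID",
    #       portfolio_id="PORTFOLIO_ID",
    #       tickers=["AAPL", "MSFT"],
    #       start_date=datetime.date(2022, 1, 1),
    #       end_date=datetime.date(2022, 12, 31),
    #   )
    #   hit_rates = acc["AAPL"]["hit_rate_mean"]  # pandas DataFrame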
1426
1427    def getHistoricalTradeDates(self, portfolio_id, start_date=None, end_date=None):
1428        end_date = self.__to_date_obj(end_date or datetime.date.today())
1429        start_date = self.__iso_format(start_date or (end_date - timedelta(days=365)))
1430        end_date = self.__iso_format(end_date)
1431
1432        url = self.base_uri + "/api/portfolios/{0}/tradingDates".format(portfolio_id)
1433        headers = {"Authorization": "ApiKey " + self.api_key}
1434        params = {"startDate": start_date, "endDate": end_date}
1435
1436        logger.info(
1437            "Retrieving historical trade dates data for date range {0} to {1}.".format(
1438                start_date, end_date
1439            )
1440        )
1441        res = requests.get(url, params=params, headers=headers, **self._request_params)
1442        if res.ok:
1443            logger.info("Trading dates retrieval successful.")
1444            return res.json()["dates"]
1445        else:
1446            error_msg = self._try_extract_error_code(res)
1447            raise BoostedAPIException("Failed to retrieve trading dates: {0}.".format(error_msg))
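
    # Illustrative sketch (portfolio ID is an assumption): with no dates given,
    # the window defaults to the 365 days ending today:
    #
    #   dates = client.getHistoricalTradeDates("PORTFOLIO_ID")
    #   dates = client.getHistoricalTradeDates(
    #       "PORTFOLIO_ID", start_date="2022-01-01", end_date="2022-06-30"
    #   )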
1448
1449    def getRankingsForAllDates(self, portfolio_id, dates=None):
1450        url = self.base_uri + "/api/portfolios/{0}/rankingsByDate".format(portfolio_id)
1451        headers = {"Authorization": "ApiKey " + self.api_key}
1452        params = {}
1453        if dates is not None:
1454            fmt_dates = []
1455            for d in dates:
1456                fmt_dates.append(self.__iso_format(d))
1457            fmt_dates_str = ",".join(fmt_dates)
1458            params = {"dates": fmt_dates_str}
1459            logger.info("Retrieving rankings information for dates {0}.".format(fmt_dates_str))
1460        else:
1461            params = {"dates": None}
1462            logger.info("Retrieving rankings information for all dates")
1463        res = requests.get(url, params=params, headers=headers, **self._request_params)
1464        if res.ok:
1465            logger.info("Rankings retrieval successful.")
1466            return res.json()
1467        else:
1468            error_msg = self._try_extract_error_code(res)
1469            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1470
1471    def getRankingsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1472        date = self.__iso_format(date)
1473        endpoint = "latest-rankings" if rollback_to_last_available_date else "rankings"
1474        url = self.base_uri + "/api/{0}/{1}/{2}".format(endpoint, portfolio_id, date)
1475        headers = {"Authorization": "ApiKey " + self.api_key}
1476        logger.info("Retrieving rankings information for date {0}.".format(date))
1477        res = requests.get(url, headers=headers, **self._request_params)
1478        if res.ok:
1479            logger.info("Rankings retrieval successful.")
1480            return res.json()
1481        else:
1482            error_msg = self._try_extract_error_code(res)
1483            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1484
1485    def sendModelRecalc(self, model_id):
1486        url = self.base_uri + "/api/models/{0}/recalc".format(model_id)
1487        logger.info("Sending model recalc request for model {0}".format(model_id))
1488        headers = {"Authorization": "ApiKey " + self.api_key}
1489        res = requests.put(url, headers=headers, **self._request_params)
1490        if not res.ok:
1491            error_msg = self._try_extract_error_code(res)
1492            logger.error(error_msg)
1493            raise BoostedAPIException(
1494                "Failed to send model recalc request - "
1495                + "the model in the UI may be out of date: {0}.".format(error_msg)
1496            )
1497
1498    def sendRecalcAllModelPortfolios(self, model_id: str):
1499        """Recalculates all portfolios under a given model ID.
1500
1501        Args:
1502            model_id: the model ID
1503        Raises:
1504            BoostedAPIException: if the Boosted API request fails
1505        """
1506        url = self.base_uri + f"/api/models/{model_id}/recalc-all-portfolios"
1507        logger.info(f"Sending portfolio recalc requests for all portfolios under {model_id=}.")
1508        headers = {"Authorization": "ApiKey " + self.api_key}
1509        res = requests.put(url, headers=headers, **self._request_params)
1510        if not res.ok:
1511            error_msg = self._try_extract_error_code(res)
1512            logger.error(error_msg)
1513            raise BoostedAPIException(
1514                f"Failed to send recalc request for all portfolios under {model_id=} - {error_msg}."
1515            )
1516
1517    def sendPortfolioRecalc(self, portfolio_id: str):
1518        """Recalculates a single portfolio by its portfolio ID.
1519
1520        Args:
1521            portfolio_id: the portfolio ID to recalculate
1522        Raises:
1523            BoostedAPIException: if the Boosted API request fails
1524        """
1525        url = self.base_uri + "/api/graphql"
1526        logger.info(f"Sending portfolio recalc request for {portfolio_id=}.")
1527        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1528        qry = """
1529            mutation recalcPortfolio($input: RecalculatePortfolioInput!) {
1530                recalculatePortfolio(input: $input) {
1531                    success
1532                    errors
1533                }
1534            }
1535            """
1536        req_json = {
1537            "query": qry,
1538            "variables": {"input": {"portfolioId": f"{portfolio_id}", "allowForceRecalc": "true"}},
1539        }
1540        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
1541        if not res.ok or res.json().get("errors"):
1542            error_msg = self._try_extract_error_code(res)
1543            logger.error(error_msg)
1544            raise BoostedAPIException(
1545                f"Failed to send portfolio recalc request for {portfolio_id=} - {error_msg}."
1546            )
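
    # Illustrative sketch of the three recalc helpers above (IDs are assumptions):
    #
    #   client.sendModelRecalc("MODEL_ID")               # single model
    #   client.sendRecalcAllModelPortfolios("MODEL_ID")  # every portfolio under a model
    #   client.sendPortfolioRecalc("PORTFOLIO_ID")       # one portfolio, via GraphQL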
1547
1548    def add_uploaded_model_data(self, url, csv_data, request_data, timeout=600):
1549        logger.info("Starting upload.")
1550        headers = {"Authorization": "ApiKey " + self.api_key}
1551        files_req: Dict = {}
1552        target: Tuple[str, Any, str] = ("data.csv", None, "text/csv")
1553        warnings = []
1554        if isinstance(csv_data, pd.core.frame.DataFrame):
1555            buf = io.StringIO()
1556            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
1557                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")
1558            csv_data.to_csv(buf, header=False)
1559            target = ("uploaded_data.csv", buf.getvalue(), "text/csv")
1560            files_req["dataFile"] = target
1561            res = requests.post(
1562                url,
1563                files=files_req,
1564                data=request_data,
1565                headers=headers,
1566                timeout=timeout,
1567                **self._request_params,
1568            )
1569        elif isinstance(csv_data, str):
1570            target = ("uploaded_data.csv", csv_data, "text/csv")
1571            files_req["dataFile"] = target
1572            res = requests.post(
1573                url,
1574                files=files_req,
1575                data=request_data,
1576                headers=headers,
1577                timeout=timeout,
1578                **self._request_params,
1579            )
1580        else:
1581            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
1582        if res.ok:
1583            logger.info("Signals upload completed.")
1584            result = res.json()["result"]
1585            if "warningMessages" in result:
1586                warnings = result["warningMessages"]
1587        else:
1588            error_str = "Signals upload failed: {0}, {1}".format(res.text, res.reason)
1589            logger.error(error_str)
1590            raise BoostedAPIException(error_str)
1591
1592        return res, warnings
1593
1594    def createSignalsModel(self, csv_data, model_name, timeout=600):
1595        warnings = []
1596        url = self.base_uri + "/api/models/upload/signals/create"
1597        request_data = {"modelName": model_name, "uploadName": model_name}
1598        res, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1599        result = res.json()["result"]
1600        model_id = result["modelId"]
1601        self.sendModelRecalc(model_id)
1602        return model_id, warnings
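
    # Illustrative sketch. add_uploaded_model_data requires a DatetimeIndex when
    # a DataFrame is passed and writes it without a header row; the column layout
    # below (isin, country, currency, weight) mirrors what
    # getSignalsFromUploadedModel returns and is an assumption here:
    #
    #   signals = pd.DataFrame(
    #       {"isin": ["US0378331005"], "country": ["US"],
    #        "currency": ["USD"], "weight": [1.0]},
    #       index=pd.to_datetime(["2023-01-03"]),
    #   )
    #   model_id, warnings = client.createSignalsModel(signals, "my-signals-model")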
1603
1604    def addToUploadedModel(self, model_id, csv_data, timeout=600, recalc_model=True):
1605        warnings = []
1606        url = self.base_uri + "/api/models/{0}/upload/add-data".format(model_id)
1607        request_data: Dict = {}
1608        _, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1609        if recalc_model:
1610            self.sendModelRecalc(model_id)
1611        return warnings
1612
1613    def addSignalsToUploadedModel(
1614        self,
1615        model_id: str,
1616        csv_data: Union[pd.core.frame.DataFrame, str],
1617        timeout: int = 600,
1618        recalc_all: bool = False,
1619        recalc_portfolio_ids: Optional[List[str]] = None,
1620    ) -> List[str]:
1621        """
1622        Add signals to an uploaded model and then recalculate the model or selected portfolios under it.
1623
1624        Args:
1625            model_id: model ID
1626            csv_data: pandas DataFrame, or a string with signals to upload.
1627            timeout (optional): Timeout for initial upload request in seconds.
1628            recalc_all (optional): if True, recalculates all portfolios in the model.
1629            recalc_portfolio_ids (optional): List of portfolio IDs under the model to recalculate.
1630        """
1631        warnings = self.addToUploadedModel(model_id, csv_data, timeout, recalc_model=False)
1632
1633        if recalc_all:
1634            self.sendRecalcAllModelPortfolios(model_id)
1635        elif recalc_portfolio_ids:
1636            for portfolio_id in recalc_portfolio_ids:
1637                self.sendPortfolioRecalc(portfolio_id)
1638        else:
1639            self.sendModelRecalc(model_id)
1640        return warnings
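
    # Illustrative sketch (IDs are assumptions): upload more signals, then
    # recalculate only selected portfolios instead of the whole model:
    #
    #   warnings = client.addSignalsToUploadedModel(
    #       model_id="MODEL_ID",
    #       csv_data=signals,  # DataFrame or CSV string, as above
    #       recalc_portfolio_ids=["PORTFOLIO_ID_1", "PORTFOLIO_ID_2"],
    #   )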
1641
1642    def getSignalsFromUploadedModel(self, model_id, date=None):
1643        date = self.__iso_format(date)
1644        url = self.base_uri + "/api/models/{0}/upload/signals".format(model_id)
1645        headers = {"Authorization": "ApiKey " + self.api_key}
1646        params = {"date": date}
1647        logger.info("Retrieving uploaded signals information")
1648        res = requests.get(url, params=params, headers=headers, **self._request_params)
1649        if res.ok:
1650            result = pd.DataFrame.from_dict(res.json()["result"])
1651            # ensure column order
1652            result = result[["date", "isin", "country", "currency", "weight"]]
1653            result["date"] = pd.to_datetime(result["date"], format="%Y-%m-%d")
1654            result = result.set_index("date")
1655            logger.info("Signals retrieval successful.")
1656            return result
1657        else:
1658            error_msg = self._try_extract_error_code(res)
1659            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1660
1661    def getPortfolioSettings(self, portfolio_id, timeout=600):
1662        url = self.base_uri + "/api/portfolio-settings/{0}".format(portfolio_id)
1663        headers = {"Authorization": "ApiKey " + self.api_key}
1664        res = requests.get(url, headers=headers, **self._request_params)
1665        if res.ok:
1666            return PortfolioSettings(res.json())
1667        else:
1668            error_msg = self._try_extract_error_code(res)
1669            logger.error(error_msg)
1670            raise BoostedAPIException(
1671                "Failed to retrieve portfolio settings: {0}.".format(error_msg)
1672            )
1673
1674    def createPortfolioWithPortfolioSettings(
1675        self, model_id, portfolio_name, portfolio_description, portfolio_settings, timeout=600
1676    ):
1677        url = self.base_uri + "/api/models/{0}/constraints/add".format(model_id)
1678        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1679        setting_string = json.dumps(portfolio_settings.settings)
1680        logger.info("Creating new portfolio with specified setting: {}".format(setting_string))
1681        params = {
1682            "name": portfolio_name,
1683            "description": portfolio_description,
1684            "settings": setting_string,
1685            "validate": "true",
1686        }
1687        res = requests.put(url, json=params, headers=headers, **self._request_params)
1688        if res.ok:
1689            # parse the body only on success; error responses may not be JSON
1690            return res.json()
1691        else:
1692            error_msg = self._try_extract_error_code(res)
1693            logger.error(error_msg)
1694            raise BoostedAPIException(
1695                "Failed to create portfolio with the specified settings: {0}.".format(error_msg)
1696            )
1697
1698    def getGbiIdFromIdentCountryCurrencyDate(
1699        self, ident_country_currency_dates: List[DateIdentCountryCurrency], timeout: int = 600
1700    ) -> List[Optional[GbiIdSecurity]]:
1701        url = self.base_uri + "/api/custom-stock-data/map-identifiers-simple"
1702        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1703        identifiers = [
1704            {
1705                "row": idx,
1706                "date": identifier.date,
1707                "isin": identifier.identifier if identifier.id_type == ColumnSubRole.ISIN else None,
1708                "symbol": (
1709                    identifier.identifier if identifier.id_type == ColumnSubRole.SYMBOL else None
1710                ),
1711                "countryPreference": identifier.country,
1712                "currencyPreference": identifier.currency,
1713            }
1714            for idx, identifier in enumerate(ident_country_currency_dates)
1715        ]
1716        params = json.dumps({"identifiers": identifiers})
1717        logger.info(
1718            "Retrieving GBI-ID mapping for {} identifier tuples...".format(
1719                len(ident_country_currency_dates)
1720            )
1721        )
1722        res = requests.post(url, data=params, headers=headers, **self._request_params)
1723
1724        if res.ok:
1725            result = res.json()
1726            warnings = result["warnings"]
1727            if warnings:
1728                for warning in warnings:
1729                    logger.warning(f"Mapping warning: {warning}")
1730            gbiSecurities = []
1731            for idx, ident in enumerate(result["mappedIdentifiers"]):
1732                if ident is None:
1733                    security = None
1734                else:
1735                    security = GbiIdSecurity(
1736                        ident["gbiId"],
1737                        ident_country_currency_dates[idx],
1738                        ident["symbol"],
1739                        ident["companyName"],
1740                    )
1741                gbiSecurities.append(security)
1742
1743            return gbiSecurities
1744        else:
1745            error_msg = self._try_extract_error_code(res)
1746            raise BoostedAPIException(
1747                "Failed to retrieve identifier mappings: {0}.".format(error_msg)
1748            )
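
    # Illustrative sketch. The DateIdentCountryCurrency constructor is not shown
    # in this module; the keyword arguments below are assumptions inferred from
    # the fields read above (date, identifier, id_type, country, currency):
    #
    #   idents = [DateIdentCountryCurrency(
    #       date="2023-01-03", identifier="US0378331005",
    #       id_type=ColumnSubRole.ISIN, country="US", currency="USD",
    #   )]
    #   securities = client.getGbiIdFromIdentCountryCurrencyDate(idents)
    #   # entries are GbiIdSecurity objects, or None where no mapping was found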
1749
1750    # exists for backwards compatibility purposes.
1751    def getGbiIdFromIsinCountryCurrencyDate(self, isin_country_currency_dates, timeout=600):
1752        return self.getGbiIdFromIdentCountryCurrencyDate(
1753            ident_country_currency_dates=isin_country_currency_dates, timeout=timeout
1754        )
1755
1756    # model_id: str
1757    # returns: Dict[str, str] representing the translation from the rankings ID (feature refs)
1758    # to human readable names
1759    def __get_rankings_ref_translation(self, model_id: str) -> Dict[str, str]:
1760        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1761        feature_name_url = f"/api/models/{model_id}/advanced-explain/translate-feature-ref/"
1762        feature_name_res = requests.post(
1763            self.base_uri + feature_name_url,
1764            data=json.dumps({}),
1765            headers=headers,
1766            **self._request_params,
1767        )
1768
1769        if feature_name_res.ok:
1770            feature_name_dict = feature_name_res.json()
1771            return {
1772                id: "-".join(
1773                    [names["variable_name"], names["transform_name"], names["normalization_name"]]
1774                )
1775                for id, names in feature_name_dict.items()
1776            }
1777        else:
1778            raise BoostedAPIException(
1779                "Failed to get feature names for model; "
1780                "this model does not fully support rankings 2.0."
1781            )
1782
1783    def getDatasetDates(self, dataset_id):
1784        url = self.base_uri + f"/api/datasets/{dataset_id}"
1785        headers = {"Authorization": "ApiKey " + self.api_key}
1786        res = requests.get(url, headers=headers, **self._request_params)
1787        if res.ok:
1788            dataset = res.json()
1789            valid_to_array = dataset.get("validTo")
1790            valid_to_date = None
1791            valid_from_array = dataset.get("validFrom")
1792            valid_from_date = None
1793            if valid_to_array:
1794                valid_to_date = datetime.date(
1795                    valid_to_array[0], valid_to_array[1], valid_to_array[2]
1796                )
1797            if valid_from_array:
1798                valid_from_date = datetime.date(
1799                    valid_from_array[0], valid_from_array[1], valid_from_array[2]
1800                )
1801            return {"validTo": valid_to_date, "validFrom": valid_from_date}
1802        else:
1803            error_msg = self._try_extract_error_code(res)
1804            logger.error(error_msg)
1805            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
1806
1807    def getRankingAnalysis(self, model_id, date):
1808        url = (
1809            self.base_uri
1810            + f"/api/explain-trades/analysis/{model_id}/{self.__iso_format(date)}/json"
1811        )
1812        headers = {"Authorization": "ApiKey " + self.api_key}
1813        analysis_res = requests.get(url, headers=headers, **self._request_params)
1814        if analysis_res.ok:
1815            ranking_dict = analysis_res.json()
1816            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1817            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1818
1819            df = protoCubeJsonDataToDataFrame(
1820                ranking_dict["data"],
1821                "Data Buckets",
1822                ranking_dict["rows"],
1823                "Feature Names",
1824                columns,
1825                ranking_dict["fields"],
1826            )
1827            return df
1828        else:
1829            error_msg = self._try_extract_error_code(analysis_res)
1830            logger.error(error_msg)
1831            raise BoostedAPIException("Failed to get ranking analysis: {0}.".format(error_msg))
1832
1833    def getExplainForPortfolio(
1834        self,
1835        model_id,
1836        portfolio_id,
1837        date,
1838        index_by_symbol: bool = False,
1839        index_by_all_metadata: bool = False,
1840    ):
1841        """
1842        Gets the ranking 2.0 explain data for the given model on the given date
1843        filtered by portfolio.
1844
1845        Parameters
1846        ----------
1847        model_id: str
1848            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1849            button next to your model's name in the Model Summary Page in Boosted
1850            Insights.
1851        portfolio_id: str
1852            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
1853        date: datetime.date or YYYY-MM-DD string
1854            Date of the data to retrieve.
1855        index_by_symbol: bool
1856            If true, index by stock symbol instead of ISIN.
1857        index_by_all_metadata: bool
1858            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1859            Overrides index_by_symbol.
1860
1861        Returns
1862        -------
1863        pandas.DataFrame
1864            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1865            and feature names, filtered by portfolio.
1866        ___
1867        """
1868        indices = ["Symbol", "ISINs", "Country", "Currency"]
1869        raw_explain_df = self.getRankingExplain(
1870            model_id, date, index_by_symbol=False, index_by_all_metadata=True
1871        )
1872        pa_ratings_dict = self.getRankingsForDate(portfolio_id, date, False)
1873
1874        ratings = pa_ratings_dict["rankings"]
1875        ratings_df = pd.DataFrame(ratings)
1876        ratings_df = ratings_df[["symbol", "isin", "country", "currency"]]
1877        ratings_df.columns = pd.Index(indices)
1878        ratings_df.set_index(indices, inplace=True)
1879
1880        # inner join to only get the securities in both data frames
1881        result_df = raw_explain_df.merge(ratings_df, left_index=True, right_index=True, how="inner")
1882
1883        # set index based on input parameters
1884        if index_by_symbol and not index_by_all_metadata:
1885            result_df = result_df.reset_index()
1886            result_df = result_df.drop(columns=["ISINs", "Currency", "Country"])
1887            result_df.set_index(["Symbol", "Feature Names"], inplace=True)
1888        elif not index_by_symbol and not index_by_all_metadata:
1889            result_df = result_df.reset_index()
1890            result_df = result_df.drop(columns=["Symbol", "Currency", "Country"])
1891            result_df.set_index(["ISINs", "Feature Names"], inplace=True)
1892
1893        return result_df
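
    # Illustrative sketch (IDs are assumptions): the result is the model-level
    # explain data inner-joined to the portfolio's holdings on that date, here
    # indexed by (Symbol, Feature Names):
    #
    #   df = client.getExplainForPortfolio(
    #       "MODEL_ID", "PORTFOLIO_ID", "2023-01-03", index_by_symbol=True
    #   )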
1894
1895    def getRankingExplain(
1896        self, model_id, date, index_by_symbol: bool = False, index_by_all_metadata: bool = False
1897    ):
1898        """
1899        Gets the ranking 2.0 explain data for the given model on the given date
1900
1901        Parameters
1902        ----------
1903        model_id: str
1904            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1905            button next to your model's name in the Model Summary Page in Boosted
1906            Insights.
1907        date: datetime.date or YYYY-MM-DD string
1908            Date of the data to retrieve.
1909        index_by_symbol: bool
1910            If true, index by stock symbol instead of ISIN.
1911        index_by_all_metadata: bool
1912            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1913            Overrides index_by_symbol.
1914
1915        Returns
1916        -------
1917        pandas.DataFrame
1918            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1919            and feature names.
1920        ___
1921        """
1922        url = (
1923            self.base_uri + f"/api/explain-trades/explain/{model_id}/{self.__iso_format(date)}/json"
1924        )
1925        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1926        explain_res = requests.get(url, headers=headers, **self._request_params)
1927        if explain_res.ok:
1928            ranking_dict = explain_res.json()
1929            rows = ranking_dict["rows"]
1930            stock_summary_url = f"/api/stock-summaries/{model_id}"
1931            stock_summary_body = {"gbiIds": ranking_dict["rows"]}
1932            summary_res = requests.post(
1933                self.base_uri + stock_summary_url,
1934                data=json.dumps(stock_summary_body),
1935                headers=headers,
1936                **self._request_params,
1937            )
1938            if summary_res.ok:
1939                stock_summary = summary_res.json()
1940                if index_by_symbol:
1941                    rows = [stock_summary[row]["symbol"] for row in ranking_dict["rows"]]
1942                elif index_by_all_metadata:
1943                    rows = [
1944                        [
1945                            stock_summary[row]["isin"],
1946                            stock_summary[row]["symbol"],
1947                            stock_summary[row]["currency"],
1948                            stock_summary[row]["country"],
1949                        ]
1950                        for row in ranking_dict["rows"]
1951                    ]
1952                else:
1953                    rows = [stock_summary[row]["isin"] for row in ranking_dict["rows"]]
1954            else:
1955                error_msg = self._try_extract_error_code(summary_res)
1956                logger.error(error_msg)
1957                raise BoostedAPIException(
1958                    "Failed to get ISIN information for ranking explain: {0}.".format(error_msg)
1959                )
1960
1961            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1962            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1963
1964            id_col_name = "Symbols" if index_by_symbol else "ISINs"
1965
1966            if index_by_all_metadata:
1967                pc_list = []
1968                pf = ranking_dict["data"]
1969                for row_idx, row in enumerate(rows):
1970                    for col_idx, col in enumerate(columns):
1971                        pc_list.append([row, col] + pf[row_idx]["columns"][col_idx]["fields"])
1972                df = pd.DataFrame(pc_list)
1973                df = df.set_axis(
1974                    ["Metadata", "Feature Names"] + ranking_dict["fields"], axis="columns"
1975                )
1976
1977                metadata_df = df["Metadata"].apply(pd.Series)
1978                metadata_df.columns = pd.Index(["ISINs", "Symbol", "Currency", "Country"])
1979                result_df = pd.concat([metadata_df, df], axis=1).drop("Metadata", axis=1)
1980                result_df.set_index(
1981                    ["ISINs", "Symbol", "Currency", "Country", "Feature Names"], inplace=True
1982                )
1983                return result_df
1984
1985            else:
1986                df = protoCubeJsonDataToDataFrame(
1987                    ranking_dict["data"],
1988                    id_col_name,
1989                    rows,
1990                    "Feature Names",
1991                    columns,
1992                    ranking_dict["fields"],
1993                )
1994
1995                return df
1996        else:
1997            error_msg = self._try_extract_error_code(explain_res)
1998            logger.error(error_msg)
1999            raise BoostedAPIException("Failed to get ranking explain: {0}.".format(error_msg))
2000
2001    def getDenseSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
2002        date = self.__iso_format(date)
2003        url = self.base_uri + f"/api/portfolios/{portfolio_id}/denseSignalsByDate"
2004        headers = {"Authorization": "ApiKey " + self.api_key}
2005        params = {
2006            "startDate": date,
2007            "endDate": date,
2008            "rollbackToMostRecentDate": rollback_to_last_available_date,
2009        }
2010        logger.info("Retrieving dense signals information for date {0}.".format(date))
2011        res = requests.get(url, params=params, headers=headers, **self._request_params)
2012        if res.ok:
2013            logger.info("Signals retrieval successful.")
2014            d = res.json()
2015            # reshape the response into the output format
2016            date = list(d["signals"].keys())[0]
2017            model_id = d["model_id"]
2018            signals_list = list(d["signals"].values())[0]
2019            return {"date": date, "signals": [{"model_id": model_id, "signals_info": signals_list}]}
2020        else:
2021            error_msg = self._try_extract_error_code(res)
2022            raise BoostedAPIException("Failed to retrieve dense signals: {0}.".format(error_msg))
2023
2024    def getDenseSignals(self, model_id, portfolio_id, file_name=None, location="./"):
2025        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/dense-signals"
2026        headers = {"Authorization": "ApiKey " + self.api_key}
2027        res = requests.get(url, headers=headers, **self._request_params)
2028        if file_name is None:
2029            file_name = f"{model_id}-{portfolio_id}_dense_signals.csv"
2030        download_location = os.path.join(location, file_name)
2031        if res.ok:
2032            with open(download_location, "wb") as file:
2033                file.write(res.content)
2034            print("Download Complete")
2035        elif res.status_code == 404:
2036            raise BoostedAPIException(
2037                f"""Dense Signals file does not exist for model:
2038                 {model_id} - portfolio: {portfolio_id}"""
2039            )
2040        else:
2041            error_msg = self._try_extract_error_code(res)
2042            logger.error(error_msg)
2043            raise BoostedAPIException(
2044                f"""Failed to download dense signals file for model:
2045                 {model_id} - portfolio: {portfolio_id}"""
2046            )
2047
2048    def _getIsPortfolioReadyForProcessing(self, model_id, portfolio_id, formatted_date):
2049        headers = {"Authorization": "ApiKey " + self.api_key}
2050        url = (
2051            self.base_uri
2052            + f"/api/explain-trades/{model_id}/{portfolio_id}"
2053            + f"/is-ready-for-processing/{formatted_date}"
2054        )
2055        res = requests.get(url, headers=headers, **self._request_params)
2056
2057        try:
2058            if res.ok:
2059                body = res.json()
2060                if "ready" in body:
2061                    if body["ready"]:
2062                        return True, ""
2063                    else:
2064                        reason_from_api = (
2065                            body["notReadyReason"] if "notReadyReason" in body else "Unavailable"
2066                        )
2067
2068                        returned_reason = reason_from_api
2069
2070                        if returned_reason == "SKIP":
2071                            returned_reason = "holiday - market closed"
2072
2073                        if returned_reason == "WAITING":
2074                            returned_reason = "calculations pending"
2075
2076                        return False, returned_reason
2077                else:
2078                    return False, "Unavailable"
2079            else:
2080                error_msg = self._try_extract_error_code(res)
2081                logger.error(error_msg)
2082                raise BoostedAPIException(
2083                    f"""Failed to generate file for model:
2084                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2085                )
2086        except Exception as e:
2087            raise BoostedAPIException(
2088                f"""Failed to generate file for model:
2089                {model_id} - portfolio: {portfolio_id} on date: {formatted_date} {e}"""
2090            )
2091
2092    def getRanking2DateAnalysisFile(
2093        self, model_id, portfolio_id, date, file_name=None, location="./"
2094    ):
2095        formatted_date = self.__iso_format(date)
2096        s3_file_name = f"{formatted_date}_analysis.xlsx"
2097        download_url = (
2098            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2099        )
2100        headers = {"Authorization": "ApiKey " + self.api_key}
2101        if file_name is None:
2102            file_name = f"{model_id}-{portfolio_id}_statistical_analysis_{formatted_date}.xlsx"
2103        download_location = os.path.join(location, file_name)
2104
2105        res = requests.get(download_url, headers=headers, **self._request_params)
2106        if res.ok:
2107            with open(download_location, "wb") as file:
2108                file.write(res.content)
2109            print("Download Complete")
2110        elif res.status_code == 404:
2111            (
2112                is_portfolio_ready_for_processing,
2113                portfolio_ready_status,
2114            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2115
2116            if not is_portfolio_ready_for_processing:
2117                logger.info(
2118                    f"""\nPortfolio {portfolio_id} for model {model_id}
2119                    on date {date} unavailable for Ranking2Date Analysis file.
2120                    Status: {portfolio_ready_status}\n"""
2121                )
2122                return
2123
2124            generate_url = (
2125                self.base_uri
2126                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2127                + f"/generate/date-data/{formatted_date}"
2128            )
2129
2130            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2131            if generate_res.ok:
2132                download_res = requests.get(download_url, headers=headers, **self._request_params)
2133                while download_res.status_code == 404 or (
2134                    download_res.ok and len(download_res.content) == 0
2135                ):
2136                    print("waiting for file to be generated")
2137                    time.sleep(5)
2138                    download_res = requests.get(
2139                        download_url, headers=headers, **self._request_params
2140                    )
2141                if download_res.ok:
2142                    with open(download_location, "wb") as file:
2143                        file.write(download_res.content)
2144                    print("Download Complete")
2145            else:
2146                error_msg = self._try_extract_error_code(generate_res)
2147                logger.error(error_msg)
2148                raise BoostedAPIException(
2149                    f"""Failed to generate ranking analysis file for model:
2150                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2151                )
2152        else:
2153            error_msg = self._try_extract_error_code(res)
2154            logger.error(error_msg)
2155            raise BoostedAPIException(
2156                f"""Failed to download ranking analysis file for model:
2157                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2158            )
2159
2160    def getRanking2DateExplainFile(
2161        self,
2162        model_id,
2163        portfolio_id,
2164        date,
2165        file_name=None,
2166        location="./",
2167        overwrite: bool = False,
2168        index_by_all_metadata: bool = False,
2169    ):
2170        """
2171        Downloads the ranking explain file for the provided portfolio and model.
2172        If no file exists, a request is sent to generate it, and the server is then
2173        polled every 5 seconds until the file is ready to download.
2174
2175        Parameters
2176        ----------
2177        model_id: str
2178            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
2179            button next to your model's name in the Model Summary Page in Boosted
2180            Insights.
2181        portfolio_id: str
2182            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
2183        date: datetime.date or YYYY-MM-DD string
2184            Date of the data to retrieve.
2185        file_name: str
2186            File name to save the explain file as.
2187            If no file name is given the file name will be
2188            "<model_id>-<portfolio_id>_explain_data_<date>.xlsx"
2189        location: str
2190            The location to save the file to.
2191            If no location is given then it will be saved to the current directory.
2192        overwrite: bool
2193            Defaults to False, set to True to regenerate the file.
2194        index_by_all_metadata: bool
2195            If true, index by all metadata: ISIN, stock symbol, currency, and country.
2196
2197
2198        Returns
2199        -------
2200        None
2201        ___
2202        """
2203        formatted_date = self.__iso_format(date)
2204        if index_by_all_metadata:
2205            s3_file_name = f"{formatted_date}_explaindata_withmetadata.xlsx"
2206        else:
2207            s3_file_name = f"{formatted_date}_explaindata.xlsx"
2208        download_url = (
2209            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2210        )
2211        headers = {"Authorization": "ApiKey " + self.api_key}
2212        if file_name is None:
2213            file_name = f"{model_id}-{portfolio_id}_explain_data_{formatted_date}.xlsx"
2214        download_location = os.path.join(location, file_name)
2215
2216        if not overwrite:
2217            res = requests.get(download_url, headers=headers, **self._request_params)
2218        if not overwrite and res.ok:
2219            with open(download_location, "wb") as file:
2220                file.write(res.content)
2221            print("Download Complete")
2222        elif overwrite or res.status_code == 404:
2223            (
2224                is_portfolio_ready_for_processing,
2225                portfolio_ready_status,
2226            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2227
2228            if not is_portfolio_ready_for_processing:
2229                logger.info(
2230                    f"""\nPortfolio {portfolio_id} for model {model_id}
2231                    on date {date} unavailable for Ranking2Date Explain file.
2232                    Status: {portfolio_ready_status}\n"""
2233                )
2234                return
2235
2236            generate_url = (
2237                self.base_uri
2238                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2239                + f"/generate/date-data/{formatted_date}"
2240                + f"/{'true' if index_by_all_metadata else 'false'}"
2241            )
2242
2243            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2244            if generate_res.ok:
2245                download_res = requests.get(download_url, headers=headers, **self._request_params)
2246                while download_res.status_code == 404 or (
2247                    download_res.ok and len(download_res.content) == 0
2248                ):
2249                    print("waiting for file to be generated")
2250                    time.sleep(5)
2251                    download_res = requests.get(
2252                        download_url, headers=headers, **self._request_params
2253                    )
2254                if download_res.ok:
2255                    with open(download_location, "wb") as file:
2256                        file.write(download_res.content)
2257                    print("Download Complete")
2258            else:
2259                error_msg = self._try_extract_error_code(generate_res)
2260                logger.error(error_msg)
2261                raise BoostedAPIException(
2262                    f"""Failed to generate ranking explain file for model:
2263                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2264                )
2265        else:
2266            error_msg = self._try_extract_error_code(res)
2267            logger.error(error_msg)
2268            raise BoostedAPIException(
2269                f"""Failed to download ranking explain file for model:
2270                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2271            )
2272
2273    def getRanking2DateExplain(
2274        self,
2275        model_id: str,
2276        portfolio_id: str,
2277        date: Optional[datetime.date],
2278        overwrite: bool = False,
2279    ) -> Dict[str, pd.DataFrame]:
2280        """
2281        Wrapper around getRanking2DateExplainFile that returns a dict of pandas
2282        dataframes (one per sheet) instead of downloading to a path. Each dataframe
2283        is indexed by symbol and should always have 'rating' and 'rating_delta'
2284        columns; the other columns are determined by the model's features.
2285        """
2286        file_name = "explaindata.xlsx"
2287        with tempfile.TemporaryDirectory() as tmpdirname:
2288            self.getRanking2DateExplainFile(
2289                model_id=model_id,
2290                portfolio_id=portfolio_id,
2291                date=date,
2292                file_name=file_name,
2293                location=tmpdirname,
2294                overwrite=overwrite,
2295            )
2296            full_path = os.path.join(tmpdirname, file_name)
2297            excel_file = pd.ExcelFile(full_path)
2298            df_map = pd.read_excel(excel_file, sheet_name=None)
2299            df_map_final = {str(sheet): df.set_index("Symbol") for (sheet, df) in df_map.items()}
2300
2301        return df_map_final
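
    # Illustrative sketch (IDs are assumptions): one DataFrame per sheet in the
    # generated workbook, each indexed by symbol:
    #
    #   sheets = client.getRanking2DateExplain(
    #       model_id="MODEL_ID", portfolio_id="PORTFOLIO_ID",
    #       date=datetime.date(2023, 1, 3),
    #   )
    #   for sheet_name, df in sheets.items():
    #       print(sheet_name, df.columns.tolist())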
2302
2303    def getTearSheet(self, model_id, portfolio_id, start_date=None, end_date=None, block=False):
2304        if start_date is None or end_date is None:
2305            if start_date is not None or end_date is not None:
2306                raise ValueError("start_date and end_date must both be None or both be defined")
2307            return self._getCurrentTearSheet(model_id, portfolio_id)
2308
2309        start_date_obj = self.__to_date_obj(start_date)
2310        end_date_obj = self.__to_date_obj(end_date)
2311        if start_date_obj >= end_date_obj:
2312            raise ValueError("end_date must be later than the start_date")
2313
2314        # get for the given date
2315        url = self.base_uri + f"/api/analysis/keyfacts/{model_id}/{portfolio_id}"
2316        data = {
2317            "startDate": self.__iso_format(start_date),
2318            "endDate": self.__iso_format(end_date),
2319            "shouldRecalc": True,
2320        }
2321        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2322        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2323        if res.status_code == 404 and block:
2324            retries = 0
2325            data["shouldRecalc"] = False
2326            while retries < 10:
2327                time.sleep(10)
2328                retries += 1
2329                res = requests.post(
2330                    url, data=json.dumps(data), headers=headers, **self._request_params
2331                )
2332                if res.status_code != 404:
2333                    break
2334        if res.ok:
2335            return res.json()
2336        else:
2337            error_msg = self._try_extract_error_code(res)
2338            logger.error(error_msg)
2339            raise BoostedAPIException(
2340                "Failed to get tear sheet data: {0} {1}.".format(error_msg, str(res.status_code))
2341            )
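
    # Illustrative sketch (IDs are assumptions): omitting both dates returns the
    # current tear sheet; block=True retries while the range is still calculating:
    #
    #   ts = client.getTearSheet("MODEL_ID", "PORTFOLIO_ID")
    #   ts_range = client.getTearSheet(
    #       "MODEL_ID", "PORTFOLIO_ID",
    #       start_date="2022-01-01", end_date="2022-12-31", block=True,
    #   )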
2342
2343    def _getCurrentTearSheet(self, model_id, portfolio_id):
2344        url = self.base_uri + f"/api/model-summaries/{model_id}/{portfolio_id}"
2345        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2346        res = requests.get(url, headers=headers, **self._request_params)
2347        if res.ok:
2348            body = res.json()  # do not shadow the json module
2349            return body.get("tearSheet", {})
2350        else:
2351            error_msg = self._try_extract_error_code(res)
2352            logger.error(error_msg)
2353            raise BoostedAPIException("Failed to get tear sheet data: {0}.".format(error_msg))
2354
2355    def getPortfolioStatus(self, model_id, portfolio_id, job_date):
2356        url = (
2357            self.base_uri
2358            + f"/api/analysis/portfolioStatus/{model_id}/{portfolio_id}?jobDate={job_date}"
2359        )
2360        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2361        res = requests.get(url, headers=headers, **self._request_params)
2362        if res.ok:
2363            result = res.json()
2364            return {
2365                "is_complete": result["status"],
2366                "last_update": None if result["lastUpdate"] is None else result["lastUpdate"][:10],
2367                "next_update": None if result["nextUpdate"] is None else result["nextUpdate"][:10],
2368            }
2369        else:
2370            error_msg = self._try_extract_error_code(res)
2371            logger.error(error_msg)
2372            raise BoostedAPIException("Failed to get portfolio status: {0}".format(error_msg))
2373
2374    def _query_portfolio_factor_attribution(
2375        self,
2376        portfolio_id: str,
2377        start_date: Optional[BoostedDate] = None,
2378        end_date: Optional[BoostedDate] = None,
2379    ):
2380        response = self._get_graphql(
2381            query=graphql_queries.GET_PORTFOLIO_FACTOR_ATTRIBUTION_QUERY,
2382            variables={
2383                "portfolioId": portfolio_id,
2384                "startDate": str(start_date) if start_date else None,
2385                "endDate": str(end_date) if end_date else None,
2386            },
2387            error_msg_prefix="Failed to get factor attribution: ",
2388        )
2389        return response
2390
2391    def get_portfolio_factor_attribution(
2392        self,
2393        portfolio_id: str,
2394        start_date: Optional[BoostedDate] = None,
2395        end_date: Optional[BoostedDate] = None,
2396    ):
2397        """Get portfolio factor attribution for a portfolio
2398
2399        Args:
2400            portfolio_id (str): a valid UUID string
2401            start_date (BoostedDate, optional): The start date. Defaults to None.
2402            end_date (BoostedDate, optional): The end date. Defaults to None.
2403        """
2404        response = self._query_portfolio_factor_attribution(portfolio_id, start_date, end_date)
2405        factor_attribution = response["data"]["portfolio"]["factorAttribution"]
2406        dates = pd.DatetimeIndex(data=factor_attribution["dates"])
2407        beta = factor_attribution["factorBetas"]
2408        beta_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in beta})
2409        beta_df = beta_df.add_suffix("_beta")
2410        returns = factor_attribution["portfolioFactorPerformance"]
2411        returns_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in returns})
2412        returns_df = returns_df.add_suffix("_return")
2413        returns_df = (returns_df - 1) * 100
2414
2415        final_df = pd.concat([returns_df, beta_df], axis=1)
2416        ordered_columns = list(itertools.chain(*zip(returns_df.columns, beta_df.columns)))
2417        ordered_final_df = final_df.reindex(columns=ordered_columns)
2418
2419        # Add the column `total_return`, the row-wise sum of the factor returns
2420        ordered_final_df["total_return"] = returns_df.sum(axis=1)
2421        return ordered_final_df
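
    # Illustrative sketch (portfolio ID is an assumption): the frame interleaves
    # "<factor>_return" and "<factor>_beta" columns, indexed by date, and appends
    # a "total_return" column:
    #
    #   df = client.get_portfolio_factor_attribution(
    #       "PORTFOLIO_ID", start_date="2022-01-01", end_date="2022-12-31"
    #   )
    #   df["total_return"].plot()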
2422
2423    def getBlacklist(self, blacklist_id):
2424        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2425        headers = {"Authorization": "ApiKey " + self.api_key}
2426        res = requests.get(url, headers=headers, **self._request_params)
2427        if res.ok:
2428            result = res.json()
2429            return result
2430        error_msg = self._try_extract_error_code(res)
2431        logger.error(error_msg)
2432        raise BoostedAPIException(f"Failed to get blacklist with id {blacklist_id}: {error_msg}")
2433
2434    def getBlacklists(self, model_id=None, company_id=None, last_N=None):
2435        params = {}
2436        if last_N:
2437            params["lastN"] = last_N
2438        if model_id:
2439            params["modelId"] = model_id
2440        if company_id:
2441            params["companyId"] = company_id
2442        url = self.base_uri + "/api/blacklist"
2443        headers = {"Authorization": "ApiKey " + self.api_key}
2444        res = requests.get(url, headers=headers, params=params, **self._request_params)
2445        if res.ok:
2446            result = res.json()
2447            return result
2448        error_msg = self._try_extract_error_code(res)
2449        logger.error(error_msg)
2450        raise BoostedAPIException(
2451            f"""Failed to get blacklists with \
2452            model_id {model_id} company_id {company_id} last_N {last_N}: {error_msg}"""
2453        )
2454
2455    def createBlacklist(
2456        self,
2457        isin,
2458        long_short=2,
2459        start_date=None,  # resolved to today's date at call time
2460        end_date="4000-01-01",
2461        model_id=None,
2462    ):
2463        url = self.base_uri + "/api/blacklist"
2464        data = {
2465            "modelId": model_id,
2466            "isin": isin,
2467            "longShort": long_short,
2468            "startDate": self.__iso_format(start_date or datetime.date.today()),
2469            "endDate": self.__iso_format(end_date),
2470        }
2471        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2472        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2473        if res.ok:
2474            return res.json()
2475        else:
2476            error_msg = self._try_extract_error_code(res)
2477            logger.error(error_msg)
2478            raise BoostedAPIException(
2479                f"""Failed to create the blacklist with \
2480                  isin {isin} long_short {long_short} start_date {start_date} end_date {end_date} \
2481                  model_id {model_id}: {error_msg}."""
2482            )
2483
2484    def createBlacklistsFromCSV(self, csv_name):
2485        url = self.base_uri + "/api/blacklists"
2486        data = []
2487        with open(csv_name, mode="r") as f:
2488            csv_reader = csv.DictReader(f)
2489            for row in csv_reader:
2490                blacklist = {"modelId": row["ModelID"], "isin": row["ISIN"]}
2491                if not row.get("LongShort"):
2492                    blacklist["longShort"] = 2
2493                else:
2494                    blacklist["longShort"] = row["LongShort"]
2495
2496                if not row.get("StartDate"):
2497                    blacklist["startDate"] = self.__iso_format(datetime.date.today())
2498                else:
2499                    blacklist["startDate"] = self.__iso_format(row["StartDate"])
2500
2501                if not row.get("EndDate"):
2502                    blacklist["endDate"] = self.__iso_format("4000-01-01")
2503                else:
2504                    blacklist["endDate"] = self.__iso_format(row["EndDate"])
2505                data.append(blacklist)
2506        logger.info(f"Processed {len(data)} blacklists.")
2507        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2508        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2509        if res.ok:
2510            return res.json()
2511        else:
2512            error_msg = self._try_extract_error_code(res)
2513            logger.error(error_msg)
2514            raise BoostedAPIException(f"Failed to create blacklists: {error_msg}")
2515
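        # Example input for createBlacklistsFromCSV (hypothetical values): only
        # ModelID and ISIN are required; blank LongShort, StartDate and EndDate
        # cells fall back to the defaults applied above (2, today, 4000-01-01).
        #
        #     ModelID,ISIN,LongShort,StartDate,EndDate
        #     11111111-2222-3333-4444-555555555555,US0378331005,2,2023-01-01,
        #     11111111-2222-3333-4444-555555555555,US5949181045,,,
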
2516    def updateBlacklist(self, blacklist_id, long_short=None, start_date=None, end_date=None):
2517        params = {}
2518        if long_short is not None:
2519            params["longShort"] = long_short
2520        if start_date:
2521            params["startDate"] = start_date
2522        if end_date:
2523            params["endDate"] = end_date
2524        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2525        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2526        res = requests.patch(url, json=params, headers=headers, **self._request_params)
2527        if res.ok:
2528            return res.json()
2529        else:
2530            error_msg = self._try_extract_error_code(res)
2531            logger.error(error_msg)
2532            raise BoostedAPIException(
2533                f"Failed to update blacklist with id {blacklist_id}: {error_msg}"
2534            )
2535
2536    def deleteBlacklist(self, blacklist_id):
2537        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2538        headers = {"Authorization": "ApiKey " + self.api_key}
2539        res = requests.delete(url, headers=headers, **self._request_params)
2540        if res.ok:
2541            result = res.json()
2542            return result
2543        else:
2544            error_msg = self._try_extract_error_code(res)
2545            logger.error(error_msg)
2546            raise BoostedAPIException(
2547                f"Failed to delete blacklist with id {blacklist_id}: {error_msg}"
2548            )
2549
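        # Hedged usage sketch for the blacklist CRUD methods above; the client and
        # IDs are placeholders, and the assumption that the create response carries
        # an "id" field is illustrative only.
        #
        #     bl = client.createBlacklist(isin="US0378331005", model_id="MODEL_ID")
        #     client.updateBlacklist(bl["id"], end_date="2030-01-01")
        #     client.deleteBlacklist(bl["id"])
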
2550    def getFeatureImportance(self, model_id, date, N=None):
2551        url = self.base_uri + f"/api/analysis/explainability/{model_id}"
2552        headers = {"Authorization": "ApiKey " + self.api_key}
2553        logger.info(f"Retrieving rankings information for date {date}.")
2554        res = requests.get(url, headers=headers, **self._request_params)
2555        if not res.ok:
2556            error_msg = self._try_extract_error_code(res)
2557            logger.error(error_msg)
2558            raise BoostedAPIException(
2559                f"Failed to fetch feature importance for model/portfolio {model_id}: {error_msg}"
2560            )
2561
2562        json_data = res.json()
2563        if "all" not in json_data.keys() or not json_data["all"]:
2564            raise BoostedAPIException(f"Unexpected formatting of feature importance response")
2565
2566        feature_data = json_data["all"]
2567        # find the right period (assuming returned json has dates in descending order)
2568        date_obj = self.__to_date_obj(date)
2569        start_date_for_return_data = self.__to_date_obj(feature_data[0]["date"])
2570        features_for_requested_period = None
2571
2572        if date_obj > start_date_for_return_data:
2573            features_for_requested_period = feature_data[0]["variable"]
2574        else:
2575            i = 0
2576            while i < len(feature_data) - 1:
2577                current_date = self.__to_date_obj(feature_data[i]["date"])
2578                next_date = self.__to_date_obj(feature_data[i + 1]["date"])
2579                if next_date <= date_obj <= current_date:
2580                    features_for_requested_period = feature_data[i + 1]["variable"]
2581                    start_date_for_return_data = next_date
2582                    break
2583                i += 1
2584
2585        if features_for_requested_period is None:
2586            raise BoostedAPIException(f"No feature data was found for requested date: {date_obj}")
2587
2588        features_for_requested_period.sort(key=lambda x: x["value"], reverse=True)
2589
2590        if type(N) is int and N > 0:
2591            df = pd.DataFrame.from_dict(features_for_requested_period[0:N])
2592        else:
2593            df = pd.DataFrame.from_dict(features_for_requested_period)
2594        result = df[["feature", "value"]]
2595
2596        return result.rename(columns={"feature": f"feature ({start_date_for_return_data})"})
2597
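        # Worked example of the period lookup above (assumed dates): if the
        # response holds periods dated 2023-03-01 and 2023-01-01 (descending) and
        # the caller asks for 2023-02-15, the loop selects the 2023-01-01 period,
        # since 2023-01-01 <= 2023-02-15 <= 2023-03-01; any date after 2023-03-01
        # takes the newest period outright.
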
2598    def getAllModelNames(self) -> Dict[str, str]:
2599        url = f"{self.base_uri}/api/graphql"
2600        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2601        req_json = {"query": "query listOfModels {\n models { id name }}", "variables": {}}
2602        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2603        if not res.ok:
2604            error_msg = self._try_extract_error_code(res)
2605            logger.error(error_msg)
2606            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2607        data = res.json()
2608        if data["data"]["models"] is None:
2609            return {}
2610        return {rec["id"]: rec["name"] for rec in data["data"]["models"]}
2611
2612    def getAllModelDetails(self) -> Dict[str, Dict[str, Any]]:
2613        url = f"{self.base_uri}/api/graphql"
2614        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2615        req_json = {
2616            "query": "query listOfModels {\n models { id name lastUpdated portfolios { id name }}}",
2617            "variables": {},
2618        }
2619        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2620        if not res.ok:
2621            error_msg = self._try_extract_error_code(res)
2622            logger.error(error_msg)
2623            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2624        data = res.json()
2625        if data["data"]["models"] is None:
2626            return {}
2627
2628        output_data = {}
2629        for rec in data["data"]["models"]:
2630            model_id = rec["id"]
2631            output_data[model_id] = {
2632                "name": rec["name"],
2633                "last_updated": parser.parse(rec["lastUpdated"]),
2634                "portfolios": rec["portfolios"],
2635            }
2636
2637        return output_data
2638
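        # Shape of the mapping returned above (placeholder values):
        #
        #     {"<model-uuid>": {"name": "My Model",
        #                       "last_updated": datetime.datetime(2024, 1, 2, ...),
        #                       "portfolios": [{"id": "...", "name": "..."}]}}
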
2639    def get_hedge_experiments(self):
2640        url = self.base_uri + "/api/graphql"
2641        qry = """
2642            query getHedgeExperiments {
2643                hedgeExperiments {
2644                    hedgeExperimentId
2645                    experimentName
2646                    userId
2647                    config
2648                    description
2649                    experimentType
2650                    lastCalculated
2651                    lastModified
2652                    status
2653                    portfolioCalcStatus
2654                    targetSecurities {
2655                        gbiId
2656                        security {
2657                            gbiId
2658                            symbol
2659                            name
2660                        }
2661                        weight
2662                    }
2663                    targetPortfolios {
2664                        portfolioId
2665                    }
2666                    baselineModel {
2667                        id
2668                        name
2669
2670                    }
2671                    baselineScenario {
2672                        hedgeExperimentScenarioId
2673                        scenarioName
2674                        description
2675                        portfolioSettingsJson
2676                        hedgeExperimentPortfolios {
2677                            portfolio {
2678                                id
2679                                name
2680                                modelId
2681                                performanceGridHeader
2682                                performanceGrid
2683                                status
2684                                tearSheet {
2685                                    groupName
2686                                    members {
2687                                        name
2688                                        value
2689                                    }
2690                                }
2691                            }
2692                        }
2693                        status
2694                    }
2695                    baselineStockUniverseId
2696                }
2697            }
2698        """
2699
2700        headers = {"Authorization": "ApiKey " + self.api_key}
2701        resp = requests.post(url, json={"query": qry}, headers=headers, **self._request_params)
2702
2703        json_resp = resp.json()
2704        # graphql endpoints typically return 200 or 400 status codes, so we must
2705        # check if we have any errors, even with a 200
2706        if (resp.ok and "errors" in json_resp) or not resp.ok:
2707            error_msg = self._try_extract_error_code(resp)
2708            logger.error(error_msg)
2709            raise BoostedAPIException(
2710                (f"Failed to get hedge experiments: {resp.status_code=}; {error_msg=}")
2711            )
2712
2713        json_experiments = resp.json()["data"]["hedgeExperiments"]
2714        experiments = [HedgeExperiment.from_json_dict(exp_json) for exp_json in json_experiments]
2715        return experiments
2716
2717    def get_hedge_experiment_details(self, experiment_id: str):
2718        url = self.base_uri + "/api/graphql"
2719        qry = """
2720            query getHedgeExperimentDetails($hedgeExperimentId: ID!) {
2721                hedgeExperiment(hedgeExperimentId: $hedgeExperimentId) {
2722                ...HedgeExperimentDetailsSummaryListFragment
2723                }
2724            }
2725
2726            fragment HedgeExperimentDetailsSummaryListFragment on HedgeExperiment {
2727                hedgeExperimentId
2728                experimentName
2729                userId
2730                config
2731                description
2732                experimentType
2733                lastCalculated
2734                lastModified
2735                status
2736                portfolioCalcStatus
2737                targetSecurities {
2738                    gbiId
2739                    security {
2740                        gbiId
2741                        symbol
2742                        name
2743                    }
2744                    weight
2745                }
2746                selectedModels {
2747                    id
2748                    name
2749                    stockUniverse {
2750                        name
2751                    }
2752                }
2753                hedgeExperimentScenarios {
2754                    ...experimentScenarioFragment
2755                }
2756                selectedDummyHedgeExperimentModels {
2757                    id
2758                    name
2759                    stockUniverse {
2760                        name
2761                    }
2762                }
2763                targetPortfolios {
2764                    portfolioId
2765                }
2766                baselineModel {
2767                    id
2768                    name
2769
2770                }
2771                baselineScenario {
2772                    hedgeExperimentScenarioId
2773                    scenarioName
2774                    description
2775                    portfolioSettingsJson
2776                    hedgeExperimentPortfolios {
2777                        portfolio {
2778                            id
2779                            name
2780                            modelId
2781                            performanceGridHeader
2782                            performanceGrid
2783                            status
2784                            tearSheet {
2785                                groupName
2786                                members {
2787                                    name
2788                                    value
2789                                }
2790                            }
2791                        }
2792                    }
2793                    status
2794                }
2795                baselineStockUniverseId
2796            }
2797
2798            fragment experimentScenarioFragment on HedgeExperimentScenario {
2799                hedgeExperimentScenarioId
2800                scenarioName
2801                status
2802                description
2803                portfolioSettingsJson
2804                hedgeExperimentPortfolios {
2805                    portfolio {
2806                        id
2807                        name
2808                        modelId
2809                        performanceGridHeader
2810                        performanceGrid
2811                        status
2812                        tearSheet {
2813                            groupName
2814                            members {
2815                                name
2816                                value
2817                            }
2818                        }
2819                    }
2820                }
2821            }
2822        """
2823        headers = {"Authorization": "ApiKey " + self.api_key}
2824        resp = requests.post(
2825            url,
2826            json={"query": qry, "variables": {"hedgeExperimentId": experiment_id}},
2827            headers=headers,
2828            **self._request_params,
2829        )
2830
2831        json_resp = resp.json()
2832        # graphql endpoints typically return 200 or 400 status codes, so we must
2833        # check if we have any errors, even with a 200
2834        if (resp.ok and "errors" in json_resp) or not resp.ok:
2835            error_msg = self._try_extract_error_code(resp)
2836            logger.error(error_msg)
2837            raise BoostedAPIException(
2838                (
2839                    f"Failed to get hedge experiment results for {experiment_id=}: "
2840                    f"{resp.status_code=}; {error_msg=}"
2841                )
2842            )
2843
2844        json_exp_results = json_resp["data"]["hedgeExperiment"]
2845        if json_exp_results is None:
2846            return None  # issued a request with a non-existent experiment_id
2847        exp_results = HedgeExperimentDetails.from_json_dict(json_exp_results)
2848        return exp_results
2849
2850    def get_portfolio_performance(
2851        self,
2852        portfolio_id: str,
2853        start_date: Optional[datetime.date],
2854        end_date: Optional[datetime.date],
2855        daily_returns: bool,
2856    ) -> pd.DataFrame:
2857        """
2858        Get performance data for a portfolio.
2859
2860        Parameters
2861        ----------
2862        portfolio_id: str
2863            UUID corresponding to the portfolio in question.
2864        start_date: datetime.date, optional
2865            Starting cutoff date used to filter performance data.
2866        end_date: datetime.date, optional
2867            Ending cutoff date used to filter performance data.
2868        daily_returns: bool
2869            Whether to add a "daily_returns" column with the daily percent change.
2870
2871        Returns
2872        -------
2873        pd.DataFrame object
2874            Portfolio and benchmark performance.
2875            -index:
2876                "date": pd.DatetimeIndex
2877            -columns:
2878                "benchmark": benchmark performance, % return
2879                "turnover": portfolio turnover, % of equity
2880                "portfolio": return since beginning of portfolio, % return
2881                "daily_returns": daily percent change in value of the portfolio, % return
2882                                (this column is optional and depends on the daily_returns flag)
2883        """
2884        url = f"{self.base_uri}/api/graphql"
2885        qry = """
2886            query getPortfolioPerformance($portfolioId: ID!) {
2887                portfolio(id: $portfolioId) {
2888                    id
2889                    modelId
2890                    name
2891                    status
2892                    performance {
2893                        benchmark
2894                        date
2895                        turnover
2896                        value
2897                    }
2898                }
2899            }
2900        """
2901
2902        headers = {"Authorization": "ApiKey " + self.api_key}
2903        resp = requests.post(
2904            url,
2905            json={"query": qry, "variables": {"portfolioId": portfolio_id}},
2906            headers=headers,
2907            **self._request_params,
2908        )
2909
2910        json_resp = resp.json()
2911        # the webserver returns an error for non-ready portfolios, so we have to check
2912        # for this prior to the error check below
2913        pf = json_resp["data"].get("portfolio")
2914        if pf is not None and pf["status"] != "READY":
2915            return pd.DataFrame()
2916
2917        # graphql endpoints typically return 200 or 400 status codes, so we must
2918        # check if we have any errors, even with a 200
2919        if (resp.ok and "errors" in json_resp) or not resp.ok:
2920            error_msg = self._try_extract_error_code(resp)
2921            logger.error(error_msg)
2922            raise BoostedAPIException(
2923                (
2924                    f"Failed to get portfolio performance for {portfolio_id=}: "
2925                    f"{resp.status_code=}; {error_msg=}"
2926                )
2927            )
2928
2929        perf = json_resp["data"]["portfolio"]["performance"]
2930        df = pd.DataFrame(perf).set_index("date").rename(columns={"value": "portfolio"})
2931        df.index = pd.to_datetime(df.index)
2932        if daily_returns:
2933            df["daily_returns"] = pd.to_numeric(df["portfolio"]).pct_change()
2934            df = df.dropna(subset=["daily_returns"])
2935        if start_date:
2936            df = df[df.index >= pd.to_datetime(start_date)]
2937        if end_date:
2938            df = df[df.index <= pd.to_datetime(end_date)]
2939        return df.astype(float)
2940
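        # Hedged usage sketch (placeholder UUID): fetch performance since 2024 with
        # the derived daily-return column; an empty frame signals the portfolio is
        # not READY yet.
        #
        #     df = client.get_portfolio_performance(
        #         "11111111-2222-3333-4444-555555555555",
        #         start_date=datetime.date(2024, 1, 1),
        #         end_date=None,
        #         daily_returns=True,
        #     )
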
2941    def _is_portfolio_still_running(self, error_msg: str) -> bool:
2942        # This is admittedly janky. A proper fix is either for the webserver to
2943        # return a better response for a portfolio still in draft (HT2-226), or a
2944        # bigger refactor of the API toward more OOP, which would let us keep
2945        # this data all in one place.
2946        return "Could not find a model with this ID" in error_msg
2947
2948    def get_portfolio_factors(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2949        url = f"{self.base_uri}/api/analysis/factors/{model_id}/{portfolio_id}"
2950        headers = {"Authorization": "ApiKey " + self.api_key}
2951        resp = requests.get(url, headers=headers, **self._request_params)
2952
2953        json_resp = resp.json()
2954        if (resp.ok and "errors" in json_resp) or not resp.ok:
2955            error_msg = json_resp["errors"][0]
2956            if self._is_portfolio_still_running(error_msg):
2957                return pd.DataFrame()
2958            logger.error(error_msg)
2959            raise BoostedAPIException(
2960                (
2961                    f"Failed to get portfolio factors for {portfolio_id=}: "
2962                    f"{resp.status_code=}; {error_msg=}"
2963                )
2964            )
2965
2966        df = pd.DataFrame(json_resp["data"], columns=json_resp["header_row"])
2967
2968        def to_lower_snake_case(s):  # why are we linting lambdas? :(
2969            return "_".join(w.lower() for w in s.split(" "))
2970
2971        df = df.rename(columns={old: to_lower_snake_case(old) for old in df.columns}).set_index(
2972            "date"
2973        )
2974        df.index = pd.to_datetime(df.index)
2975        return df
2976
2977    def get_portfolio_volatility(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2978        url = f"{self.base_uri}/api/analysis/volatility_rolling/{model_id}/{portfolio_id}"
2979        headers = {"Authorization": "ApiKey " + self.api_key}
2980        resp = requests.get(url, headers=headers, **self._request_params)
2981
2982        json_resp = resp.json()
2983        if (resp.ok and "errors" in json_resp) or not resp.ok:
2984            error_msg = json_resp["errors"][0]
2985            if self._is_portfolio_still_running(error_msg):
2986                return pd.DataFrame()
2987            logger.error(error_msg)
2988            raise BoostedAPIException(
2989                (
2990                    f"Failed to get portfolio volatility for {portfolio_id=}: "
2991                    f"{resp.status_code=}; {error_msg=}"
2992                )
2993            )
2994
2995        df = pd.DataFrame(json_resp["data"], columns=json_resp["headerRow"])
2996        df = df.rename(
2997            columns={old: old.lower().replace("avg", "avg_") for old in df.columns}  # type: ignore
2998        ).set_index("date")
2999        df.index = pd.to_datetime(df.index)
3000        return df
3001
3002    def get_portfolio_holdings(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
3003        url = f"{self.base_uri}/api/models/{model_id}/{portfolio_id}/basket-data"
3004        headers = {"Authorization": "ApiKey " + self.api_key}
3005        resp = requests.get(url, headers=headers, **self._request_params)
3006
3007        # this is a classic abuse of try/except as control flow: we try to get json body
3008        # from the response so that we can error-check. if this fails, we assume we have
3009        # a legit text response (corresponding to the csv data we care about)
3010        try:
3011            json_resp = resp.json()
3012        except json.decoder.JSONDecodeError:
3013            df = pd.read_csv(io.StringIO(resp.text), header=[0])
3014        else:
3015            error_msg = json_resp["errors"][0]
3016            if self._is_portfolio_still_running(error_msg):
3017                return pd.DataFrame()
3018            else:
3019                logger.error(error_msg)
3020                raise BoostedAPIException(
3021                    (
3022                        f"Failed to get portfolio holdings for {portfolio_id=}: "
3023                        f"{resp.status_code=}; {error_msg=}"
3024                    )
3025                )
3026
3027        df = df.rename(columns={old: old.lower() for old in df.columns}).set_index("date")
3028        df.index = pd.to_datetime(df.index)
3029        return df
3030
3031    def getStockDataTableForDate(
3032        self, model_id: str, portfolio_id: str, date: datetime.date
3033    ) -> pd.DataFrame:
3034        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3035
3036        url_base = f"{self.base_uri}/api/analysis"
3037        url_params = f"{model_id}/{portfolio_id}"
3038        formatted_date = date.strftime("%Y-%m-%d")
3039
3040        stock_prices_url = f"{url_base}/stock-prices/{url_params}/{formatted_date}"
3041        stock_factors_url = f"{url_base}/stock-factors/{url_params}/date/{formatted_date}"
3042
3043        prices_params = {"useTicker": "false", "useCurrentSignals": "true"}
3044        factors_params = {"useTicker": "false", "useCurrentSignals": "true"}
3045
3046        prices_resp = requests.get(
3047            stock_prices_url, headers=headers, params=prices_params, **self._request_params
3048        )
3049        factors_resp = requests.get(
3050            stock_factors_url, headers=headers, params=factors_params, **self._request_params
3051        )
3052
3053        frames = []
3054        gbi_ids = set()
3055        for res in (prices_resp, factors_resp):
3056            if not res.ok:
3057                error_msg = self._try_extract_error_code(res)
3058                logger.error(error_msg)
3059                raise BoostedAPIException(
3060                    (
3061                        f"Failed to fetch stock data table for model {model_id}"
3062                        f" (it's possible no data is present for the given date: {date})."
3063                        f" Error message: {error_msg}"
3064                    )
3065                )
3066            result = res.json()
3067            df = pd.DataFrame(result)
3068            gbi_ids.update(df.columns.to_list())
3069            frames.append(df)
3070
3071        all_gbiid_df = pd.concat(frames)
3072
3073        # Get the metadata of all GBI IDs
3074        gbiid_metadata_res = self._get_graphql(
3075            query=graphql_queries.GET_SEC_INFO_QRY, variables={"ids": [int(x) for x in gbi_ids]}
3076        )
3077        # Build a DF of metadata x GBI IDs
3078        gbiid_metadata_df = pd.DataFrame(
3079            {str(x["gbiId"]): x for x in gbiid_metadata_res["data"]["securities"]}
3080        )
3081        # Slice the metadata we care about. We'll drop "symbol" at the end.
3082        isin_country_currency_df = gbiid_metadata_df.loc[["isin", "country", "currency", "symbol"]]
3083        # Concatenate metadata to the existing stock data DF
3084        all_gbiid_with_metadata_df = pd.concat([all_gbiid_df, isin_country_currency_df])
3085        gbiid_with_symbol_df = all_gbiid_with_metadata_df.loc[
3086            :, all_gbiid_with_metadata_df.loc["symbol"].notna()
3087        ]
3088        renamed_df = gbiid_with_symbol_df.rename(
3089            index={"isin": "ISIN"}, columns=gbiid_with_symbol_df.loc["symbol"].to_dict()
3090        )
3091        output_df = renamed_df.drop(index=["symbol"])
3092        return output_df
3093
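        # Resulting shape (illustrative tickers): rows hold the price/factor fields
        # plus the appended "ISIN"/"country"/"currency" metadata rows, and columns
        # are renamed from GBI IDs to symbols, e.g.
        #
        #                  AAPL          MSFT
        #     ISIN         US0378331005  US5949181045
        #     country      US            US
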
3094    def add_hedge_experiment_scenario(
3095        self,
3096        experiment_id: str,
3097        scenario_name: str,
3098        scenario_settings: PortfolioSettings,
3099        run_scenario_immediately: bool,
3100    ) -> HedgeExperimentScenario:
3101        add_scenario_input = {
3102            "hedgeExperimentId": experiment_id,
3103            "scenarioName": scenario_name,
3104            "portfolioSettingsJson": str(scenario_settings),
3105            "runExperimentOnScenario": run_scenario_immediately,
3106            "createDefaultPortfolio": "false",
3107        }
3108        qry = """
3109            mutation addHedgeExperimentScenario(
3110                $input: AddHedgeExperimentScenarioInput!
3111            ) {
3112                addHedgeExperimentScenario(input: $input) {
3113                    hedgeExperimentScenario {
3114                        hedgeExperimentScenarioId
3115                        scenarioName
3116                        description
3117                        portfolioSettingsJson
3118                    }
3119                }
3120            }
3121
3122        """
3123
3124        url = f"{self.base_uri}/api/graphql"
3125
3126        resp = requests.post(
3127            url,
3128            headers={"Authorization": "ApiKey " + self.api_key},
3129            json={"query": qry, "variables": {"input": add_scenario_input}},
3130        )
3131
3132        json_resp = resp.json()
3133        if (resp.ok and "errors" in json_resp) or not resp.ok:
3134            error_msg = self._try_extract_error_code(resp)
3135            logger.error(error_msg)
3136            raise BoostedAPIException(
3137                (f"Failed to add scenario: {resp.status_code=}; {error_msg=}")
3138            )
3139
3140        scenario_dict = json_resp["data"]["addHedgeExperimentScenario"]["hedgeExperimentScenario"]
3141        if scenario_dict is None:
3142            raise BoostedAPIException(
3143                "Failed to add scenario, likely due to bad experiment id or api key"
3144            )
3145        s = HedgeExperimentScenario.from_json_dict(scenario_dict)
3146        return s
3147
3148    # experiment life cycle has 4 steps:
3149    # 1. creation - essentially a very simple registration of a new instance, returning an id
3150    # 2. modify - populate with settings
3151    # 3. start - run the experiment
3152    # 4. delete - drop the experiment
3153    # While I'd prefer just two funcs covering (1,2,3) and (4) for a simpler API,
3154    # we need to expose finer-grained control because of how scenarios work.
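        # A hedged end-to-end sketch of that lifecycle (exp_type, the IDs and the
        # attribute names on the returned HedgeExperiment objects are placeholders):
        #
        #     exp = client.create_hedge_experiment("my exp", "demo", exp_type, None)
        #     exp = client.modify_hedge_experiment(exp.hedge_experiment_id, name="renamed")
        #     exp = client.start_hedge_experiment(exp.hedge_experiment_id)
        #     ...
        #     client.delete_hedge_experiment(exp.hedge_experiment_id)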
3155    def create_hedge_experiment(
3156        self,
3157        name: str,
3158        description: str,
3159        experiment_type: hedge_experiment_type,
3160        target_securities: Union[Dict[GbiIdSecurity, float], str, None],
3161    ) -> HedgeExperiment:
3162        # we don't pass target_securities here (as much as I'd like to) because the
3163        # graphql input doesn't support it at this point
3164
3165        # note that this query returns a lot of null fields at this point, but
3166        # they are necessary for building a HedgeExperiment.
3167        create_qry = """
3168            mutation createDraftMutation($input: CreateHedgeExperimentDraftInput!) {
3169                createHedgeExperimentDraft(input: $input) {
3170                    hedgeExperiment {
3171                        hedgeExperimentId
3172                        experimentName
3173                        userId
3174                        config
3175                        description
3176                        experimentType
3177                        lastCalculated
3178                        lastModified
3179                        status
3180                        portfolioCalcStatus
3181                        targetSecurities {
3182                            gbiId
3183                            security {
3184                                gbiId
3185                                name
3186                                symbol
3187                            }
3188                            weight
3189                        }
3190                        baselineModel {
3191                            id
3192                            name
3193                        }
3194                        baselineScenario {
3195                            hedgeExperimentScenarioId
3196                            scenarioName
3197                            description
3198                            portfolioSettingsJson
3199                            hedgeExperimentPortfolios {
3200                                portfolio {
3201                                    id
3202                                    name
3203                                    modelId
3204                                    performanceGridHeader
3205                                    performanceGrid
3206                                    status
3207                                    tearSheet {
3208                                        groupName
3209                                        members {
3210                                            name
3211                                            value
3212                                        }
3213                                    }
3214                                }
3215                            }
3216                            status
3217                        }
3218                        baselineStockUniverseId
3219                    }
3220                }
3221            }
3222        """
3223
3224        create_input: Dict[str, Any] = {
3225            "name": name,
3226            "experimentType": experiment_type,
3227            "description": description,
3228        }
3229        if isinstance(target_securities, dict):
3230            create_input["setTargetSecurities"] = [
3231                {"gbiId": sec.gbi_id, "weight": weight}
3232                for (sec, weight) in target_securities.items()
3233            ]
3234        elif isinstance(target_securities, str):
3235            create_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3236        elif target_securities is None:
3237            pass
3238        else:
3239            raise TypeError(
3240                "Expected value of type Union[Dict[GbiIdSecurity, str], str] for "
3241                f"argument 'target_securities'; got {type(target_securities)}"
3242            )
3243        resp = requests.post(
3244            f"{self.base_uri}/api/graphql",
3245            json={"query": create_qry, "variables": {"input": create_input}},
3246            headers={"Authorization": "ApiKey " + self.api_key},
3247            **self._request_params,
3248        )
3249
3250        json_resp = resp.json()
3251        if (resp.ok and "errors" in json_resp) or not resp.ok:
3252            error_msg = self._try_extract_error_code(resp)
3253            logger.error(error_msg)
3254            raise BoostedAPIException(
3255                (f"Failed to create hedge experiment: {resp.status_code=}; {error_msg=}")
3256            )
3257
3258        exp_dict = json_resp["data"]["createHedgeExperimentDraft"]["hedgeExperiment"]
3259        experiment = HedgeExperiment.from_json_dict(exp_dict)
3260        return experiment
3261
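        # The target_securities argument above accepts two shapes (placeholder
        # values): a weight map keyed by GbiIdSecurity, or a portfolio-id string,
        # which is sent as setTargetPortfolios instead.
        #
        #     client.create_hedge_experiment(..., target_securities={sec: 0.5})
        #     client.create_hedge_experiment(..., target_securities="<portfolio-uuid>")
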
3262    def modify_hedge_experiment(
3263        self,
3264        experiment_id: str,
3265        name: Optional[str] = None,
3266        description: Optional[str] = None,
3267        experiment_type: Optional[hedge_experiment_type] = None,
3268        target_securities: Union[Dict[GbiIdSecurity, float], str, None] = None,
3269        model_ids: Optional[List[str]] = None,
3270        stock_universe_ids: Optional[List[str]] = None,
3271        create_default_scenario: bool = True,
3272        baseline_model_id: Optional[str] = None,
3273        baseline_stock_universe_id: Optional[str] = None,
3274        baseline_portfolio_settings: Optional[str] = None,
3275    ) -> HedgeExperiment:
3276        mod_qry = """
3277            mutation modifyHedgeExperimentDraft(
3278                $input: ModifyHedgeExperimentDraftInput!
3279            ) {
3280                modifyHedgeExperimentDraft(input: $input) {
3281                    hedgeExperiment {
3282                    ...HedgeExperimentSelectedSecuritiesPageFragment
3283                    }
3284                }
3285            }
3286
3287            fragment HedgeExperimentSelectedSecuritiesPageFragment on HedgeExperiment {
3288                hedgeExperimentId
3289                experimentName
3290                userId
3291                config
3292                description
3293                experimentType
3294                lastCalculated
3295                lastModified
3296                status
3297                portfolioCalcStatus
3298                targetSecurities {
3299                    gbiId
3300                    security {
3301                        gbiId
3302                        name
3303                        symbol
3304                    }
3305                    weight
3306                }
3307                targetPortfolios {
3308                    portfolioId
3309                }
3310                baselineModel {
3311                    id
3312                    name
3313                }
3314                baselineScenario {
3315                    hedgeExperimentScenarioId
3316                    scenarioName
3317                    description
3318                    portfolioSettingsJson
3319                    hedgeExperimentPortfolios {
3320                        portfolio {
3321                            id
3322                            name
3323                            modelId
3324                            performanceGridHeader
3325                            performanceGrid
3326                            status
3327                            tearSheet {
3328                                groupName
3329                                members {
3330                                    name
3331                                    value
3332                                }
3333                            }
3334                        }
3335                    }
3336                    status
3337                }
3338                baselineStockUniverseId
3339            }
3340        """
3341        mod_input = {
3342            "hedgeExperimentId": experiment_id,
3343            "createDefaultScenario": create_default_scenario,
3344        }
3345        if name is not None:
3346            mod_input["newExperimentName"] = name
3347        if description is not None:
3348            mod_input["newExperimentDescription"] = description
3349        if experiment_type is not None:
3350            mod_input["newExperimentType"] = experiment_type
3351        if model_ids is not None:
3352            mod_input["setSelectdModels"] = model_ids
3353        if stock_universe_ids is not None:
3354            mod_input["selectedStockUniverseIds"] = stock_universe_ids
3355        if baseline_model_id is not None:
3356            mod_input["setBaselineModel"] = baseline_model_id
3357        if baseline_stock_universe_id is not None:
3358            mod_input["setBaselineStockUniverse"] = baseline_stock_universe_id
3359        if baseline_portfolio_settings is not None:
3360            mod_input["setBaselinePortfolioSettings"] = baseline_portfolio_settings
3361        # note that the behaviors bound to these data are mutually exclusive,
3362        # and it's possible the opposite was set earlier in the DRAFT phase
3363        # of experiment creation, so when setting one, we must unset the other
3364        if isinstance(target_securities, dict):
3365            mod_input["setTargetSecurities"] = [
3366                {"gbiId": sec.gbi_id, "weight": weight}
3367                for (sec, weight) in target_securities.items()
3368            ]
3369            mod_input["setTargetPortfolios"] = None
3370        elif isinstance(target_securities, str):
3371            mod_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3372            mod_input["setTargetSecurities"] = None
3373        elif target_securities is None:
3374            pass
3375        else:
3376            raise TypeError(
3377                "Expected value of type Union[Dict[GbiIdSecurity, str], str] "
3378                f"for argument 'target_securities'; got {type(target_securities)}"
3379            )
3380
3381        resp = requests.post(
3382            f"{self.base_uri}/api/graphql",
3383            json={"query": mod_qry, "variables": {"input": mod_input}},
3384            headers={"Authorization": "ApiKey " + self.api_key},
3385            **self._request_params,
3386        )
3387
3388        json_resp = resp.json()
3389        if (resp.ok and "errors" in json_resp) or not resp.ok:
3390            error_msg = self._try_extract_error_code(resp)
3391            logger.error(error_msg)
3392            raise BoostedAPIException(
3393                (
3394                    f"Failed to modify hedge experiment in preparation for start {experiment_id=}: "
3395                    f"{resp.status_code=}; {error_msg=}"
3396                )
3397            )
3398
3399        exp_dict = json_resp["data"]["modifyHedgeExperimentDraft"]["hedgeExperiment"]
3400        experiment = HedgeExperiment.from_json_dict(exp_dict)
3401        return experiment
3402
3403    def start_hedge_experiment(self, experiment_id: str, *scenario_ids: str) -> HedgeExperiment:
3404        start_qry = """
3405            mutation startHedgeExperiment($input: StartHedgeExperimentInput!) {
3406                startHedgeExperiment(input: $input) {
3407                    hedgeExperiment {
3408                        hedgeExperimentId
3409                        experimentName
3410                        userId
3411                        config
3412                        description
3413                        experimentType
3414                        lastCalculated
3415                        lastModified
3416                        status
3417                        portfolioCalcStatus
3418                        targetSecurities {
3419                            gbiId
3420                            security {
3421                                gbiId
3422                                name
3423                                symbol
3424                            }
3425                            weight
3426                        }
3427                        targetPortfolios {
3428                            portfolioId
3429                        }
3430                        baselineModel {
3431                            id
3432                            name
3433                        }
3434                        baselineScenario {
3435                            hedgeExperimentScenarioId
3436                            scenarioName
3437                            description
3438                            portfolioSettingsJson
3439                            hedgeExperimentPortfolios {
3440                                portfolio {
3441                                    id
3442                                    name
3443                                    modelId
3444                                    performanceGridHeader
3445                                    performanceGrid
3446                                    status
3447                                    tearSheet {
3448                                        groupName
3449                                        members {
3450                                            name
3451                                            value
3452                                        }
3453                                    }
3454                                }
3455                            }
3456                            status
3457                        }
3458                        baselineStockUniverseId
3459                    }
3460                }
3461            }
3462        """
3463        start_input: Dict[str, Any] = {"hedgeExperimentId": experiment_id}
3464        if len(scenario_ids) > 0:
3465            start_input["hedgeExperimentScenarioIds"] = list(scenario_ids)
3466
3467        resp = requests.post(
3468            f"{self.base_uri}/api/graphql",
3469            json={"query": start_qry, "variables": {"input": start_input}},
3470            headers={"Authorization": "ApiKey " + self.api_key},
3471            **self._request_params,
3472        )
3473
3474        json_resp = resp.json()
3475        if (resp.ok and "errors" in json_resp) or not resp.ok:
3476            error_msg = self._try_extract_error_code(resp)
3477            logger.error(error_msg)
3478            raise BoostedAPIException(
3479                (
3480                    f"Failed to start hedge experiment {experiment_id=}: "
3481                    f"{resp.status_code=}; {error_msg=}"
3482                )
3483            )
3484
3485        exp_dict = json_resp["data"]["startHedgeExperiment"]["hedgeExperiment"]
3486        experiment = HedgeExperiment.from_json_dict(exp_dict)
3487        return experiment
3488
3489    def delete_hedge_experiment(self, experiment_id: str) -> bool:
3490        delete_qry = """
3491            mutation($input: DeleteHedgeExperimentsInput!) {
3492                deleteHedgeExperiments(input: $input) {
3493                    success
3494                }
3495            }
3496        """
3497        delete_input = {"hedgeExperimentIds": [experiment_id]}
3498        resp = requests.post(
3499            f"{self.base_uri}/api/graphql",
3500            json={"query": delete_qry, "variables": {"input": delete_input}},
3501            headers={"Authorization": "ApiKey " + self.api_key},
3502            **self._request_params,
3503        )
3504
3505        json_resp = resp.json()
3506        if (resp.ok and "errors" in json_resp) or not resp.ok:
3507            error_msg = self._try_extract_error_code(resp)
3508            logger.error(error_msg)
3509            raise BoostedAPIException(
3510                (
3511                    f"Failed to delete hedge experiment {experiment_id=}: "
3512                    + f"status_code={resp.status_code}; error_msg={error_msg}"
3513                )
3514            )
3515
3516        return json_resp["data"]["deleteHedgeExperiments"]["success"]
3517
3518    def create_hedge_basket_position_bounds_from_csv(
3519        self,
3520        filepath: str,
3521        name: str,
3522        description: Optional[str],
3523        mapping_result_filepath: Optional[str],
3524    ) -> str:
3525        DATE = "Date"
3526        ISIN = "ISIN"
3527        COUNTRY = "Country"
3528        CURRENCY = "Currency"
3529        LOWER_BOUND = "Lower Bound"
3530        UPPER_BOUND = "Upper Bound"
3531        supported_columns = {
3532            DATE,
3533            ISIN,
3534            COUNTRY,
3535            CURRENCY,
3536            LOWER_BOUND,
3537            UPPER_BOUND,
3538        }
3539        required_columns = {ISIN, LOWER_BOUND, UPPER_BOUND}
3540
3541        try:
3542            df: pd.DataFrame = pd.read_csv(filepath, parse_dates=True)
3543        except Exception as e:
3544            raise BoostedAPIException(f"Error reading {filepath=}: {e}")
3545
3546        columns = set(df.columns)
3547
3548        # First perform basic data validation
3549        missing_required_columns = required_columns - columns
3550        if missing_required_columns:
3551            raise BoostedAPIException(
3552                f"The following required columns are missing: {missing_required_columns}"
3553            )
3554        extra_columns = columns - supported_columns
3555        if extra_columns:
3556            logger.warning(
3557                f"The following columns are unsupported and will be ignored: {extra_columns}"
3558            )
3559        try:
3560            df[LOWER_BOUND] = df[LOWER_BOUND].astype(float)
3561            df[UPPER_BOUND] = df[UPPER_BOUND].astype(float)
3562            df[ISIN] = df[ISIN].astype(str)
3563        except Exception as e:
3564            raise BoostedAPIException(f"Column datatypes are incorrect: {e}")
3565        lb_gt_ub = df[df[LOWER_BOUND] > df[UPPER_BOUND]]
3566        if not lb_gt_ub.empty:
3567            raise BoostedAPIException(
3568                f"Lower Bound must be <= Upper Bound, but these are not: {lb_gt_ub[ISIN].tolist()}"
3569            )
3570        out_of_range = df[
3571            (
3572                (df[LOWER_BOUND] < 0)
3573                | (df[LOWER_BOUND] > 1)
3574                | (df[UPPER_BOUND] < 0)
3575                | (df[UPPER_BOUND] > 1)
3576            )
3577        ]
3578        if not out_of_range.empty:
3579            raise BoostedAPIException("Lower Bound and Upper Bound values must be in range [0, 1]")
3580
3581        # Now map the security info into GBI IDs
3582        rows = list(df.to_dict(orient="index").values())
3583        sec_data_list = self.getGbiIdFromIdentCountryCurrencyDate(
3584            ident_country_currency_dates=[
3585                DateIdentCountryCurrency(
3586                    date=row.get(DATE, datetime.date.today().isoformat()),
3587                    identifier=row.get(ISIN),
3588                    id_type=ColumnSubRole.ISIN,
3589                    country=row.get(COUNTRY),
3590                    currency=row.get(CURRENCY),
3591                )
3592                for row in rows
3593            ]
3594        )
3595
3596        # Now take each row and its gbi id mapping, and create the bounds list
3597        bounds = []
3598        for row, sec_data in zip(rows, sec_data_list):
3599            if sec_data is None:
3600                logger.warning(f"Failed to map {row[ISIN]}, skipping this security.")
3601            else:
3602                bounds.append(
3603                    {"gbi_id": str(sec_data.gbi_id), "lb": row[LOWER_BOUND], "ub": row[UPPER_BOUND]}
3604                )
3605
3606                # Add security metadata to see the mapping
3607                row["Mapped GBI ID"] = sec_data.gbi_id
3608                row[f"Mapped {ISIN}"] = sec_data.isin_info.identifier
3609                row[f"Mapped {COUNTRY}"] = sec_data.isin_info.country
3610                row[f"Mapped {CURRENCY}"] = sec_data.isin_info.currency
3611                row["Mapped Ticker"] = sec_data.ticker
3612                row["Mapped Company Name"] = sec_data.company_name
3613
3614        # Call endpoint to create the bounds settings template
3615        qry = """
3616              mutation CreatePartialStrategyTemplate(
3617                $portfolioSettingsKey: String!
3618                $partialSettings: String!
3619                $name: String!
3620                $description: String
3621              ) {
3622                createPartialStrategyTemplate(
3623                  portfolioSettingsKey: $portfolioSettingsKey
3624                  partialSettings: $partialSettings
3625                  name: $name
3626                  description: $description
3627                )
3628              }
3629            """
3630        variables = {
3631            "portfolioSettingsKey": "basketTrading.positionSizeBounds",
3632            "partialSettings": json.dumps(bounds),
3633            "name": name,
3634            "description": description,
3635        }
3636        resp = self._get_graphql(qry, variables=variables)
3637
3638        # Write mapped csv for reference
3639        if mapping_result_filepath is not None:
3640            pd.DataFrame(rows).to_csv(mapping_result_filepath)
3641
3642        return resp["data"]["createPartialStrategyTemplate"]
3643
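        # Example bounds CSV (hypothetical rows): ISIN, Lower Bound and Upper Bound
        # are required; Date, Country and Currency are optional, and both bounds
        # must lie in [0, 1] with Lower Bound <= Upper Bound.
        #
        #     Date,ISIN,Country,Currency,Lower Bound,Upper Bound
        #     2024-01-02,US0378331005,US,USD,0.00,0.05
        #     2024-01-02,US5949181045,,,0.01,0.10
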
3644    def get_hit_rate_file(self, model_id: str, portfolio_id: str, file_key: str) -> dict:
3645        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate-file/"
3646        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3647        req_json = {"model_id": model_id, "portfolio_id": portfolio_id, "file_key": file_key}
3648        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3649        if not res.ok:
3650            error_msg = self._try_extract_error_code(res)
3651            logger.error(error_msg)
3652            raise BoostedAPIException(f"Failed to get Hit Rate file: {error_msg}")
3653
3654        data = res.json()
3655        return data
3656
3657    def get_hit_rate_with_securities(
3658        self,
3659        model_id: str,
3660        portfolio_id: str,
3661        meet_all_conditions: bool,
3662        securities: List[str],
3663        countries: List[str],
3664        sectors: List[str],
3665        start_date: Optional[BoostedDate],
3666        end_date: Optional[BoostedDate],
3667    ) -> dict:
3668
3669        start_date, end_date = get_date_range(start_date=start_date, end_date=end_date)
3670        start_date, end_date = start_date.isoformat(), end_date.isoformat()
3671
3672        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"  # noqa f"http://0.0.0.0:8000{DAL_PA_ROUTE}/get-securities-hit-rate/"
3673        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3674        req_json = {
3675            "model_id": model_id,
3676            "portfolio_id": portfolio_id,
3677            "meet_all_conditions": meet_all_conditions,
3678            "securities": securities,
3679            "countries": countries,
3680            "sectors": sectors,
3681            "start_date": start_date,
3682            "end_date": end_date,
3683        }
3684        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3685
3686        if not res.ok:
3687            error_msg = self._try_extract_error_code(res)
3688            logger.error(error_msg)
3689            raise BoostedAPIException(f"Failed to get Hit Rate with securities: {error_msg}")
3690
3691        data = res.json()
3692        return data
3693
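        # Hedged usage sketch (placeholder IDs; filter semantics follow the
        # meet_all_conditions flag, which presumably requires every condition to
        # match when True):
        #
        #     data = client.get_hit_rate_with_securities(
        #         model_id="MODEL_ID", portfolio_id="PORTFOLIO_ID",
        #         meet_all_conditions=True, securities=["AAPL"],
        #         countries=["USA"], sectors=["Information Technology"],
        #         start_date=None, end_date=None,
        #     )
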
3694    def get_portfolio_accuracy(
3695        self,
3696        model_id: str,
3697        portfolio_id: str,
3698        start_date: Optional[BoostedDate] = None,
3699        end_date: Optional[BoostedDate] = None,
3700    ) -> dict:
3701        if start_date and end_date:
3702            validate_start_and_end_dates(start_date=start_date, end_date=end_date)
3703            start_date = convert_date(start_date)
3704            end_date = convert_date(end_date)
3705
3706        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
3707        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"
3708        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3709        req_json = {"model_id": model_id, "portfolio_id": portfolio_id}
3710        if start_date and end_date:
3711            req_json["start_date"] = start_date.isoformat()
3712            req_json["end_date"] = end_date.isoformat()
3713        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3714
3715        if not res.ok:
3716            error_msg = self._try_extract_error_code(res)
3717            logger.error(error_msg)
3718            raise BoostedAPIException(f"Failed to get Hit Rate: {error_msg}")
3719
3720        data = res.json()
3721        return data
3722
3723    def create_watchlist(self, name: str) -> str:
3724        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create/"
3725        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3726        req_json = {"name": name}
3727        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3728
3729        if not res.ok:
3730            error_msg = self._try_extract_error_code(res)
3731            logger.error(error_msg)
3732            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
3733
3734        data = res.json()
3735        return data["watchlist_id"]
3736
3737    def _get_graphql(
3738        self,
3739        query: str,
3740        variables: Dict,
3741        error_msg_prefix: str = "Failed to get graphql result: ",
3742        log_error: bool = True,
3743    ) -> Dict:
3744        headers = {"Authorization": "ApiKey " + self.api_key}
3745        json_req = {"query": query, "variables": variables}
3746
3747        url = self.base_uri + "/api/graphql"
3748        resp = requests.post(
3749            url,
3750            json=json_req,
3751            headers=headers,
3752            params=self._request_params,
3753        )
3754
3755        # graphql endpoints typically return 200 or 400 status codes, so we must
3756        # check if we have any errors, even with a 200
3757        if not resp.ok or "errors" in resp.json():
3758            error_msg = self._try_extract_error_code(resp)
3759            error_str = str(error_msg_prefix) + f" {resp.status_code=}; {error_msg=}"
3760            if log_error:
3761                logger.error(error_str)
3762            raise BoostedAPIException(error_str)
3763
3764        json_resp = resp.json()
3765        return json_resp
3766
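        # Minimal sketch of the failure mode _get_graphql guards against: a GraphQL
        # endpoint can answer HTTP 200 yet still fail, e.g. (assumed payload)
        #
        #     {"data": None, "errors": [{"message": "model not found"}]}
        #
        # which is why the body is inspected even for 2xx responses.
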
3767    def _get_security_info(self, gbi_ids: List[int]) -> Dict:
3768        query = graphql_queries.GET_SEC_INFO_QRY
3769        variables = {
3770            "ids": [] if not gbi_ids else gbi_ids,
3771        }
3772
3773        error_msg_prefix = "Failed to get Security Details:"
3774        return self._get_graphql(
3775            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3776        )
3777
3778    def _get_sector_info(self) -> Dict:
3779        """
3780        Returns a list of sector objects, e.g.
3781        {
3782            "id": 1010,
3783            "parentId": 10,
3784            "name": "Energy",
3785            "topParentName": null,
3786            "spiqSectorId": -1,
3787            "legacy": false
3788        }
3789        """
3790        url = f"{self.base_uri}/api/sectors"
3791        headers = {"Authorization": "ApiKey " + self.api_key}
3792        res = requests.get(url, headers=headers, **self._request_params)
3793        self._check_ok_or_err_with_msg(res, "Failed to get sectors data")
3794        return res.json()["sectors"]
3795
3796    def _get_watchlist_analysis(
3797        self,
3798        gbi_ids: List[int],
3799        model_ids: List[str],
3800        portfolio_ids: List[str],
3801        asof_date: Optional[datetime.date] = None,
3802    ) -> Dict:
3803        # Resolve the default at call time; a `datetime.date.today()` default in
3804        # the signature would be frozen at import time.
3805        asof_date = asof_date if asof_date else datetime.date.today()
3806        query = graphql_queries.WATCHLIST_ANALYSIS_QRY
3807        variables = {
3808            "gbiIds": gbi_ids,
3809            "modelIds": model_ids,
3810            "portfolioIds": portfolio_ids,
3811            "date": self.__iso_format(asof_date),
3812        }
3810        error_msg_prefix = "Failed to get Coverage Analysis:"
3811        return self._get_graphql(
3812            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3813        )
3814
3815    def _get_models_for_portfolio(self, portfolio_ids: List[str]) -> Dict:
3816        query = graphql_queries.GET_MODELS_FOR_PORTFOLIOS_QRY
3817        variables = {"ids": portfolio_ids}
3818        error_msg_prefix = "Failed to get Models for Portfolios: "
3819        return self._get_graphql(
3820            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3821        )
3822
3823    def _get_excess_return(
3824        self,
3825        model_ids: List[str],
3826        gbi_ids: List[int],
3827        asof_date: Optional[datetime.date] = None,
3828    ) -> Dict:
3829        # Resolve the default at call time, not at import time.
3830        asof_date = asof_date if asof_date else datetime.date.today()
3831        query = graphql_queries.GET_EXCESS_RETURN_QRY
3827
3828        variables = {
3829            "modelIds": model_ids,
3830            "gbiIds": gbi_ids,
3831            "date": self.__iso_format(asof_date),
3832        }
3833        error_msg_prefix = "Failed to get Excess Return Slugging Pct: "
3834        return self._get_graphql(
3835            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3836        )
3837
3838    def _coverage_column_name_format(self, in_str: str) -> str:
3839        if in_str.upper() == "ISIN":
3840            return "ISIN"
3841
3842        return in_str.title()
3843
3844    def _get_model_stocks(self, model_id: str) -> List[GbiIdTickerISIN]:
3845        # first, get the universe id
3846        resp = self._get_graphql(
3847            graphql_queries.GET_MODEL_STOCK_UNIVERSE_ID_QUERY,
3848            variables={"modelId": model_id},
3849            error_msg_prefix="Failed to get model stock universe ID",
3850        )
3851        universe_id = resp["data"]["model"]["stockUniverseId"]
3852
3853        # now, query for universe stocks
3854        url = self.base_uri + f"/api/stocks/model-universe/{universe_id}"
3855        headers = {"Authorization": "ApiKey " + self.api_key}
3856        universe_resp = requests.get(url, headers=headers, **self._request_params)
3857        self._check_ok_or_err_with_msg(universe_resp, "Failed to get model universe stocks")
3858        universe = universe_resp.json()["stockUniverse"]
3858        securities = [
3859            GbiIdTickerISIN(gbi_id=security["id"], ticker=security["symbol"], isin=security["isin"])
3860            for security in universe
3861        ]
3862        return securities
3863
3864    def get_coverage_info(self, watchlist_id: str, portfolio_group_id: str) -> pd.DataFrame:
3865        # get securities list in watchlist
3866        watchlist_details = self.get_watchlist_details(watchlist_id)
3867        security_list = watchlist_details["targets"]
3868
3869        gbi_ids = [x["gbi_id"] for x in security_list]
3870
3871        gbi_data: Dict[Any, Dict] = {x: {} for x in gbi_ids}
3872
3873        # get security info ticker, name, industry etc
3874        sec_info = self._get_security_info(gbi_ids)
3875
3876        for sec in sec_info["data"]["securities"]:
3877            gbi_id = sec["gbiId"]
3878            for k in ["symbol", "name", "isin", "country", "currency"]:
3879                gbi_data[gbi_id][self._coverage_column_name_format(k)] = sec[k]
3880
3881            gbi_data[gbi_id][self._coverage_column_name_format("Sector")] = sec["sector"][
3882                "topParentName"
3883            ]
3884
3885        # get portfolios list in portfolio_Group
3886        portfolio_group = self.get_portfolio_group(portfolio_group_id)
3887        portfolio_ids = [x["portfolio_id"] for x in portfolio_group["portfolios"]]
3888        portfolio_info = {x["portfolio_id"]: x for x in portfolio_group["portfolios"]}
3889
3890        model_resp = self._get_models_for_portfolio(portfolio_ids=portfolio_ids)
3891        for portfolio in model_resp["data"]["portfolios"]:
3892            portfolio_info[portfolio["id"]].update(portfolio)
3893
3894        model_info = {
3895            x["modelId"]: portfolio_info[x["id"]] for x in model_resp["data"]["portfolios"]
3896        }
3897
3898        # model_ids and portfolio_ids are parallel arrays
3899        model_ids = [portfolio_info[x]["modelId"] for x in portfolio_ids]
3900
3901        # graphql: get watchlist analysis
3902        wl_analysis = self._get_watchlist_analysis(
3903            gbi_ids=gbi_ids,
3904            model_ids=model_ids,
3905            portfolio_ids=portfolio_ids,
3906            asof_date=datetime.date.today(),
3907        )
3908
3909        portfolio_gbi_data: Dict[Any, Dict] = {k: {} for k in portfolio_ids}
3910        for v in portfolio_gbi_data.values():
3911            v.update({k: {} for k in gbi_data.keys()})
3912
3913        equity_explorer_date = wl_analysis["data"]["watchlistAnalysis"][0]["analysisDates"][0][
3914            "date"
3915        ]
3916        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3917            gbi_id = wla["gbiId"]
3918            gbi_data[gbi_id]["Composite Rating"] = wla["analysisDates"][0]["aggregateSignal"][
3919                "rating"
3920            ]
3921            gbi_data[gbi_id]["Composite Rating Delta"] = wla["analysisDates"][0]["aggregateSignal"][
3922                "ratingDelta"
3923            ]
3924
3925            for p in wla["analysisDates"][0]["portfoliosSignals"]:
3926                model_name = portfolio_info[p["portfolioId"]]["modelName"]
3927
3928                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3929                    model_name + self._coverage_column_name_format(": rank")
3930                ] = (p["rank"] + 1)
3931                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3932                    model_name + self._coverage_column_name_format(": rank delta")
3933                ] = (-1 * p["signalDelta"])
3934                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3935                    model_name + self._coverage_column_name_format(": rating")
3936                ] = p["rating"]
3937                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3938                    model_name + self._coverage_column_name_format(": rating delta")
3939                ] = p["ratingDelta"]
3940
3941        neg_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3942        pos_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3943        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3944            gbi_id = wla["gbiId"]
3945
3946            for pid, signals in zip(portfolio_ids, wla["analysisDates"][0]["portfoliosSignals"]):
3947                model_name = portfolio_info[pid]["modelName"]
3948                neg_rec[gbi_id][
3949                    model_name + self._coverage_column_name_format(": negative recommendation")
3950                ] = signals["explainWeightNeg"]
3951                pos_rec[gbi_id][
3952                    model_name + self._coverage_column_name_format(": positive recommendation")
3953                ] = signals["explainWeightPos"]
3954
3955        # graphql: GetExcessReturn - slugging pct
3956        er_sp = self._get_excess_return(
3957            model_ids=model_ids, gbi_ids=gbi_ids, asof_date=equity_explorer_date
3958        )
3959
3960        for model in er_sp["data"]["models"]:
3961            model_name = model_info[model["id"]]["modelName"]
3962            for stat in model["equityExplorerData"]["equityExplorerSummaryStatistics"]:
3963                portfolioId = model_info[model["id"]]["id"]
3964                portfolio_gbi_data[portfolioId][int(stat["gbiId"])][
3965                    model_name + self._coverage_column_name_format(": slugging %")
3966                ] = (stat["ER"]["SP"]["sixMonthWindowOneMonthHorizon"] * 100)
3967
3968        # add rank, rating, slugging
3969        for v in portfolio_gbi_data.values():
3970            for gbi_id, vv in v.items():
3971                gbi_data[gbi_id].update(vv)
3972
3973        # add neg/pos rec scores
3974        for rec in [neg_rec, pos_rec]:
3975            for k, v in rec.items():
3976                gbi_data[k].update(v)
3977
3978        df = pd.DataFrame.from_records(list(gbi_data.values()))
3979
3980        return df
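
        # Usage sketch (illustrative; IDs are hypothetical placeholders):
        #
        #     df = client.get_coverage_info(
        #         watchlist_id="<watchlist-uuid>",
        #         portfolio_group_id="<portfolio-group-uuid>",
        #     )
        #     # One row per watchlist security; columns include Symbol, Name, ISIN,
        #     # Country, Currency, Sector, the composite ratings, and the per-model
        #     # rank/rating/slugging/recommendation columns assembled above.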
3981
3982    def get_coverage_csv(
3983        self, watchlist_id: str, portfolio_group_id: str, filepath: Optional[str] = None
3984    ) -> Optional[str]:
3985        """
3986        Converts the coverage contents to CSV format
3987        Parameters
3988        ----------
3989        watchlist_id: str
3990            UUID str identifying the coverage watchlist
3991        portfolio_group_id: str
3992            UUID str identifying the group of portfolios to use for analysis
3993        filepath: Optional[str]
3994            path to write the CSV to; if None, the CSV contents are returned as a string
3995
3996        Returns:
3997        ----------
3998        None if filepath is provided, else the CSV contents as a string
3999        """
4000
4001        df = self.get_coverage_info(watchlist_id, portfolio_group_id)
4002
4003        return df.to_csv(filepath, index=False, float_format="%.4f")
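
        # Usage sketch (illustrative; the path and IDs are hypothetical):
        #
        #     # write to disk
        #     client.get_coverage_csv("<watchlist-uuid>", "<group-uuid>", "coverage.csv")
        #     # or capture the CSV contents as a string
        #     csv_str = client.get_coverage_csv("<watchlist-uuid>", "<group-uuid>")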
4004
4005    def get_watchlist_details(self, watchlist_id: str) -> Dict:
4006        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/details/"
4007        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4008        req_json = {"watchlist_id": watchlist_id}
4009        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4010
4011        if not res.ok:
4012            error_msg = self._try_extract_error_code(res)
4013            logger.error(error_msg)
4014            raise BoostedAPIException(f"Failed to get watchlist details: {error_msg}")
4015
4016        data = res.json()
4017        return data
4018
4019    def create_watchlist_from_file(self, name: str, filepath: str) -> str:
4020        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create_watchlist_from_file/"
4021        headers = {"Authorization": "ApiKey " + self.api_key}
4022
4023        with open(filepath, "rb") as fp:
4024            file_bytes = fp.read()
4025
4026        file_bytes_base64 = base64.b64encode(file_bytes).decode("ascii")
4027        json_req = {
4028            "content_type": mimetypes.guess_type(filepath)[0],
4029            "file_bytes_base64": file_bytes_base64,
4030            "name": name,
4031        }
4032
4033        res = requests.post(url, json=json_req, headers=headers, **self._request_params)
4034
4035        if not res.ok:
4036            error_msg = self._try_extract_error_code(res)
4037            logger.error(error_msg)
4038            raise BoostedAPIException(f"Failed to create watchlist from file: {error_msg}")
4039
4040        data = res.json()
4041        return data["watchlist_id"]
4042
4043    def get_watchlists(self) -> List[Dict]:
4044        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/get_user_watchlists/"
4045        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4046        req_json: Dict = {}
4047        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4048
4049        if not res.ok:
4050            error_msg = self._try_extract_error_code(res)
4051            logger.error(error_msg)
4052            raise BoostedAPIException(f"Failed to get user watchlists: {error_msg}")
4053
4054        data = res.json()
4055        return data["watchlists"]
4056
4057    def get_watchlist_contents(self, watchlist_id: str) -> Dict:
4058        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/contents/"
4059        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4060        req_json = {"watchlist_id": watchlist_id}
4061        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4062
4063        if not res.ok:
4064            error_msg = self._try_extract_error_code(res)
4065            logger.error(error_msg)
4066            raise BoostedAPIException(f"Failed to get watchlist contents: {error_msg}")
4067
4068        data = res.json()
4069        return data
4070
4071    def get_watchlist_contents_as_csv(self, watchlist_id: str, filepath: str) -> None:
4072        data = self.get_watchlist_contents(watchlist_id)
4073        df = pd.DataFrame(data["contents"])
4074        df.to_csv(filepath, index=False)
4075
4076    # TODO this will need to be enhanced to accept country/currency overrides
4077    def add_securities_to_watchlist(
4078        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4079    ) -> Dict:
4080        # should we just make the arg lower? all caps has a flag-like feel to it
4081        id_type = identifier_type.lower()
4082        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/add_{id_type}s/"
4083        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4084        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4085        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4086
4087        if not res.ok:
4088            error_msg = self._try_extract_error_code(res)
4089            logger.error(error_msg)
4090            raise BoostedAPIException(f"Failed to add securities to watchlist: {error_msg}")
4091
4092        data = res.json()
4093        return data
4094
4095    def remove_securities_from_watchlist(
4096        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4097    ) -> Dict:
4098        # should we just make the arg lower? all caps has a flag-like feel to it
4099        id_type = identifier_type.lower()
4100        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/remove_{id_type}s/"
4101        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4102        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4103        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4104
4105        if not res.ok:
4106            error_msg = self._try_extract_error_code(res)
4107            logger.error(error_msg)
4108            raise BoostedAPIException(
4109                f"Failed to remove securities from watchlist: {error_msg}"
4110            )
4109
4110        data = res.json()
4111        return data
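
        # Usage sketch tying the watchlist methods together (hypothetical values):
        #
        #     wl_id = client.create_watchlist(name="My Watchlist")
        #     client.add_securities_to_watchlist(wl_id, ["AAPL", "MSFT"], "TICKER")
        #     client.remove_securities_from_watchlist(wl_id, ["MSFT"], "TICKER")
        #     client.get_watchlist_contents_as_csv(wl_id, "watchlist.csv")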
4112
4113    def get_portfolio_groups(
4114        self,
4115    ) -> Dict:
4116        """
4117        Parameters: None
4118
4119
4120        Returns:
4121        ----------
4122
4123        Dict:  {
4124        user_id: str
4125        portfolio_groups: List[PortfolioGroup]
4126        }
4127        where PortfolioGroup is defined as = Dict {
4128        group_id: str
4129        group_name: str
4130        portfolios: List[PortfolioInGroup]
4131        }
4132        where PortfolioInGroup is defined as = Dict {
4133        portfolio_id: str
4134        rank_in_group: Optional[int]
4135        }
4136        """
4137        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get"
4138        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4139        req_json: Dict = {}
4140        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4141
4142        if not res.ok:
4143            error_msg = self._try_extract_error_code(res)
4144            logger.error(error_msg)
4145            raise BoostedAPIException(f"Failed to get user portfolio groups: {error_msg}")
4146
4147        data = res.json()
4148        return data
4149
4150    def get_portfolio_group(self, portfolio_group_id: str) -> Dict:
4151        """
4152        Parameters:
4153        portfolio_group_id: str
4154           UUID identifier for the portfolio group
4155
4156
4157        Returns:
4158        ----------
4159
4160        PortfolioGroup: Dict:  {
4161        group_id: str
4162        group_name: str
4163        portfolios: List[PortfolioInGroup]
4164        }
4165        where PortfolioInGroup is defined as = Dict {
4166        portfolio_id: str
4167        portfolio_name: str
4168        rank_in_group: Optional[int]
4169        }
4170        """
4171        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-one"
4172        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4173        req_json = {"portfolio_group_id": portfolio_group_id}
4174        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4175
4176        if not res.ok:
4177            error_msg = self._try_extract_error_code(res)
4178            logger.error(error_msg)
4179            raise BoostedAPIException(f"Failed to get portfolio group: {error_msg}")
4180
4181        data = res.json()
4182        return data
4183
4184    def set_sticky_portfolio_group(
4185        self,
4186        portfolio_group_id: str,
4187    ) -> Dict:
4188        """
4189        Set sticky portfolio group
4190
4191        Parameters
4192        ----------
4193
4194        portfolio_group_id: str,
4195           UUID str identifying a portfolio group
4196
4197        Returns:
4198        -------
4199        Dict {
4200            changed: int - 1 == success
4201        }
4202        """
4203        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/set-sticky"
4204        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4205        req_json = {"portfolio_group_id": portfolio_group_id}
4206        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4207
4208        if not res.ok:
4209            error_msg = self._try_extract_error_code(res)
4210            logger.error(error_msg)
4211            raise BoostedAPIException(f"Failed to set sticky portfolio group: {error_msg}")
4212
4213        data = res.json()
4214        return data
4215
4216    def get_sticky_portfolio_group(
4217        self,
4218    ) -> Dict:
4219        """
4220        Get sticky portfolio group for the user
4221
4222        Parameters
4223        ----------
4224
4225        Returns:
4226        -------
4227        Dict {
4228            group_id: str
4229            group_name: str
4230            portfolios: List[PortfolioInGroup(Dict)]
4231                  PortfolioInGroup(Dict):
4232                           portfolio_id: str
4233                           rank_in_group: Optional[int] = None
4234                           portfolio_name: Optional[str] = None
4235        }
4236        """
4237        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-sticky"
4238        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4239        req_json: Dict = {}
4240        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4241
4242        if not res.ok:
4243            error_msg = self._try_extract_error_code(res)
4244            logger.error(error_msg)
4245            raise BoostedAPIException(f"Failed to get sticky portfolio group: {error_msg}")
4246
4247        data = res.json()
4248        return data
4249
4250    def create_portfolio_group(
4251        self,
4252        group_name: str,
4253        portfolios: Optional[List[Dict]] = None,
4254    ) -> Dict:
4255        """
4256        Create a new portfolio group
4257
4258        Parameters
4259        ----------
4260
4261        group_name: str
4262           name of the new group
4263
4264        portfolios: List of Dicts of the form {
4265            portfolio_id: str
4266            rank_in_group: Optional[int] = None
4267        }
4269
4270        Returns:
4271        ----------
4272
4273        Dict: {
4274        group_id: str
4275           UUID identifier for the portfolio group
4276
4277        created: int
4278           num groups created, 1 == success
4279
4280        added: int
4281           num portfolios added to the group, should match the length of 'portfolios' argument
4282        }
4283        """
4284        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/create"
4285        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4286        req_json = {"group_name": group_name, "portfolios": portfolios}
4287
4288        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4289
4290        if not res.ok:
4291            error_msg = self._try_extract_error_code(res)
4292            logger.error(error_msg)
4293            raise BoostedAPIException(f"Failed to create portfolio group: {error_msg}")
4294
4295        data = res.json()
4296        return data
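
        # Usage sketch (illustrative; the portfolio UUID is hypothetical):
        #
        #     group = client.create_portfolio_group(
        #         group_name="My Group",
        #         portfolios=[{"portfolio_id": "<portfolio-uuid>", "rank_in_group": 1}],
        #     )
        #     group_id = group["group_id"]  # group["created"] == 1 on success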
4297
4298    def rename_portfolio_group(
4299        self,
4300        group_id: str,
4301        group_name: str,
4302    ) -> Dict:
4303        """
4304        Rename a portfolio group
4305
4306        Parameters
4307        ----------
4308
4309        group_id: str,
4310           UUID str identifying a portfolio group
4311
4312        group_name: str,
4313           The new name for the portfolio group
4314
4315        Returns:
4316        -------
4317        Dict {
4318            changed: int - 1 == success
4319        }
4320        """
4321        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/rename"
4322        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4323        req_json = {"group_id": group_id, "group_name": group_name}
4324        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4325
4326        if not res.ok:
4327            error_msg = self._try_extract_error_code(res)
4328            logger.error(error_msg)
4329            raise BoostedAPIException(f"Failed to rename portfolio group: {error_msg}")
4330
4331        data = res.json()
4332        return data
4333
4334    def add_to_portfolio_group(
4335        self,
4336        group_id: str,
4337        portfolios: List[Dict],
4338    ) -> Dict:
4339        """
4340        Add portfolios to a group
4341
4342        Parameters
4343        ----------
4344
4345        group_id: str,
4346           UUID str identifying a portfolio group
4347
4348        portfolios: List of Dicts of the form {
4349            portfolio_id: str
4350            rank_in_group: Optional[int] = None
4351        }
4352
4353
4354        Returns:
4355        -------
4356        Dict {
4357            added: int
4358               number of successful changes
4359        }
4360        """
4361        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/add-to-group"
4362        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4363        req_json = {"group_id": group_id, "portfolios": portfolios}
4364
4365        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4366
4367        if not res.ok:
4368            error_msg = self._try_extract_error_code(res)
4369            logger.error(error_msg)
4370            raise BoostedAPIException(f"Failed to add portfolios to portfolio group: {error_msg}")
4371
4372        data = res.json()
4373        return data
4374
4375    def remove_from_portfolio_group(
4376        self,
4377        group_id: str,
4378        portfolios: List[str],
4379    ) -> Dict:
4380        """
4381        Remove portfolios from a group
4382
4383        Parameters
4384        ----------
4385
4386        group_id: str,
4387           UUID str identifying a portfolio group
4388
4389        portfolios: List of str
4390
4391
4392        Returns:
4393        -------
4394        Dict {
4395            removed: int
4396               number of successful changes
4397        }
4398        """
4399        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove-from-group"
4400        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4401        req_json = {"group_id": group_id, "portfolios": portfolios}
4402        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4403
4404        if not res.ok:
4405            error_msg = self._try_extract_error_code(res)
4406            logger.error(error_msg)
4407            raise BoostedAPIException(
4408                f"Failed to remove portfolios from portfolio group: {error_msg}"
4409            )
4410
4411        data = res.json()
4412        return data
4413
4414    def delete_portfolio_group(
4415        self,
4416        group_id: str,
4417    ) -> Dict:
4418        """
4419        Delete a portfolio group
4420
4421        Parameters
4422        ----------
4423
4424        group_id: str,
4425           UUID str identifying a portfolio group
4426
4427
4428        Returns:
4429        -------
4430        Dict {
4431            removed_groups: int
4432               number of successful changes
4433
4434            removed_portfolios: int
4435               number of successful changes
4436        }
4437        """
4438        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove"
4439        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4440        req_json = {"group_id": group_id}
4441        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4442
4443        if not res.ok:
4444            error_msg = self._try_extract_error_code(res)
4445            logger.error(error_msg)
4446            raise BoostedAPIException(f"Failed to delete portfolio group: {error_msg}")
4447
4448        data = res.json()
4449        return data
4450
4451    def set_portfolio_group_for_watchlist(
4452        self,
4453        portfolio_group_id: str,
4454        watchlist_id: str,
4455    ) -> Dict:
4456        """
4457        Set portfolio group for watchlist.
4458
4459        Parameters
4460        ----------
4461
4462        portfolio_group_id: str,
4463           UUID str identifying a portfolio group
4464
4465        watchlist_id: str,
4466           UUID str identifying a watchlist
4467
4468
4469        Returns:
4470        -------
4471        Dict {
4472            success: bool
4473            errors:
4474            data: Dict
4475                changed: int
4476        }
4477        """
4478        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/set-portfolio-groups/"
4479        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4480        req_json = {"portfolio_group_id": portfolio_group_id, "watchlist_id": watchlist_id}
4481        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4482
4483        if not res.ok:
4484            error_msg = self._try_extract_error_code(res)
4485            logger.error(error_msg)
4486            raise BoostedAPIException(f"Failed to set portfolio group for watchlist: {error_msg}")
4487
4488        return res.json()
4489
4490    def get_ranking_dates(self, model_id: str, portfolio_id: str) -> List[datetime.date]:
4491        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4492        url = self.base_uri + f"/api/analysis/ranking-dates/{model_id}/{portfolio_id}"
4493        res = requests.get(url, headers=headers, **self._request_params)
4494        self._check_ok_or_err_with_msg(res, "Failed to get ranking dates")
4495        data = res.json().get("ranking_dates", [])
4496
4497        return [parser.parse(d).date() for d in data]
4498
4499    def get_prior_ranking_date(
4500        self, ranking_dates: List[datetime.date], starting_date: datetime.date
4501    ) -> datetime.date:
4502        """
4503        Given a starting date and a list of ranking dates, return the most
4504        recent previous ranking date.
4505        """
4506        # order from most recent to least
4507        ranking_dates.sort(reverse=True)
4508
4509        for d in ranking_dates:
4510            if d <= starting_date:
4511                return d
4512
4513        # if we get here, the starting date is before the earliest ranking date
4514        raise BoostedAPIException(f"No rankings exist on or before {starting_date}")
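
        # Example of the lookup above (pure Python, no API call; dates are hypothetical):
        #
        #     dates = [datetime.date(2023, 1, 2), datetime.date(2023, 2, 1)]
        #     client.get_prior_ranking_date(dates, datetime.date(2023, 1, 15))
        #     # -> datetime.date(2023, 1, 2)
        #
        # Note that the list is sorted in place, so callers should not rely on the
        # original ordering of `ranking_dates` afterwards.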
4515
4516    def _get_risk_factors_descriptors(
4517        self, model_id: str, portfolio_id: str, use_v2: bool = False
4518    ) -> Dict[int, str]:
4519        """Returns a map from descriptor id to descriptor name."""
4520        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4521
4522        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4523        url = self.base_uri + f"/api/{risk_factor}/{model_id}/{portfolio_id}/descriptors"
4524        res = requests.get(url, headers=headers, **self._request_params)
4525
4526        self._check_ok_or_err_with_msg(res, "Failed to get risk factor descriptors")
4527
4528        descriptors = {int(i): name for i, name in res.json().items() if i.isnumeric()}
4529        return descriptors
4530
4531    def get_risk_groups(
4532        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4533    ) -> List[Dict[str, Any]]:
4534        # first get the group descriptors
4535        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4536
4537        # calculate the most recent prior rankings date. This is the date
4538        # we need to use to query for risk group data.
4539        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4540        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4541        date_str = ranking_date.strftime("%Y-%m-%d")
4542
4543        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4544
4545        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4546        url = self.base_uri + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-groups/{date_str}"
4547        res = requests.get(url, headers=headers, **self._request_params)
4548
4549        self._check_ok_or_err_with_msg(
4550            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4551        )
4552
4553        # Response is a list of rows like:
4554        # [
4555        #   [
4556        #     [0, 14, 1],
4557        #     [25, 12, 13],
4558        #     0.67013
4559        #   ],
4560        #   ...
4561        # ]
4562        #
4568        # Where each integer in the lists is a descriptor id.
4569
4570        groups = []
4571        for i, row in enumerate(res.json()):
4572            row_map: Dict[str, Any] = {}
4573            # map descriptor id to name
4574            row_map["machine"] = i + 1  # start at 1 not 0
4575            row_map["risk_group_a"] = [descriptors[i] for i in row[0]]
4576            row_map["risk_group_b"] = [descriptors[i] for i in row[1]]
4577            row_map["volatility_explained"] = row[2]
4578            groups.append(row_map)
4579
4580        return groups
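
        # Usage sketch (illustrative IDs; output shape shown as a comment):
        #
        #     groups = client.get_risk_groups(
        #         model_id="<model-uuid>",
        #         portfolio_id="<portfolio-uuid>",
        #         date=datetime.date.today(),
        #     )
        #     # each entry: {"machine": 1, "risk_group_a": [...names...],
        #     #              "risk_group_b": [...names...], "volatility_explained": 0.67}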
4581
4582    def get_risk_factors_discovered_descriptors(
4583        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4584    ) -> pd.DataFrame:
4585        # first get the group descriptors
4586        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4587
4588        # calculate the most recent prior rankings date. This is the date
4589        # we need to use to query for risk group data.
4590        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4591        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4592        date_str = ranking_date.strftime("%Y-%m-%d")
4593
4594        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4595
4596        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4597        url = (
4598            self.base_uri
4599            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-descriptors/json/{date_str}"
4600        )
4601        res = requests.get(url, headers=headers, **self._request_params)
4602
4603        self._check_ok_or_err_with_msg(
4604            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4605        )
4606
4607        # Endpoint returns a nested list of floats
4608        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4609
4610        # This flat dataframe represents a potentially doubly nested structure
4611        # of Sector -> (high/low volatility) -> security. We don't care about
4612        # the high/low volatility rows, (which will have negative identifiers)
4613        # so we can filter these out.
4614        df = df[df["identifier"] >= 0]
4615
4616        # now, any values that had a depth of 2 should be set to a depth of 1,
4617        # since we removed the double nesting.
4618        df.replace(to_replace=2, value=1, inplace=True)
4619
4620        # This dataframe represents data that is nested on the UI, so the
4621        # "depth" field indicates which level of nesting each row is at. At this
4622        # point, a depth of 0 indicates a sector, and following depth 1 rows are
4623        # securities within the sector.
4624
4625        # Identifiers in rows with depth 1 will be gbi ids, need to convert to
4626        # symbols.
4627        gbi_ids = df[df["depth"] == 1]["identifier"].tolist()
4628        sec_info = self._get_security_info(gbi_ids)["data"]["securities"]
4629        sec_map = {s["gbiId"]: s["symbol"] for s in sec_info}
4630
4631        def convert_ids(row: pd.Series) -> pd.Series:
4632            # convert each row's "identifier" to the appropriate id type. If the
4633            # depth is 0, the identifier should be a sector, otherwise it should
4634            # be a ticker.
4635            ident = int(row["identifier"])
4636            row["identifier"] = (
4637                descriptors.get(ident).title() if row["depth"] == 0 else sec_map.get(ident)
4638            )
4639            return row
4640
4641        df["depth"] = df["depth"].astype(int)
4642        df["stock_count"] = df["stock_count"].astype(int)
4643        df = df.apply(convert_ids, axis=1)
4644        df = df.reset_index(drop=True)
4645        return df
4646
4647    def get_risk_factors_sectors(
4648        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4649    ) -> pd.DataFrame:
4650        # first get the group descriptors
4651        sectors = {s["id"]: s["name"] for s in self._get_sector_info()}
4652
4653        # calculate the most recent prior rankings date. This is the date
4654        # we need to use to query for risk group data.
4655        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4656        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4657        date_str = ranking_date.strftime("%Y-%m-%d")
4658
4659        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4660
4661        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4662        url = (
4663            self.base_uri
4664            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-sectors/json/{date_str}"
4665        )
4666        res = requests.get(url, headers=headers, **self._request_params)
4667
4668        self._check_ok_or_err_with_msg(
4669            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4670        )
4671
4672        # Endpoint returns a nested list of floats
4673        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4674
4675        # identifier is a gics sector identifier
4676        df["identifier"] = df["identifier"].apply(lambda i: sectors.get(int(i), None))
4677
4678        # This dataframe represents data that is nested on the UI, so the
4679        # "depth" field indicates which level of nesting each row is at. For
4680        # risk factors sectors, each "depth" represents a level of specificity
4681        # for the sector. E.g. Energy -> Energy Equipment -> Oil & Gas Equipment
4682        df["depth"] = df["depth"].astype(int)
4683        df["stock_count"] = df["stock_count"].astype(int)
4684        df = df.reset_index(drop=True)
4685        return df
4686
4687    def download_complete_portfolio_data(
4688        self, model_id: str, portfolio_id: str, download_filepath: str
4689    ):
4690        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4691        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/excel"
4692
4693        res = requests.get(url, headers=headers, **self._request_params)
4694        self._check_ok_or_err_with_msg(
4695            res, f"Failed to get full data for {model_id=}, {portfolio_id=}"
4696        )
4697
4698        with open(download_filepath, "wb") as f:
4699            f.write(res.content)
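
        # Usage sketch (illustrative; the endpoint returns an Excel workbook):
        #
        #     client.download_complete_portfolio_data(
        #         model_id="<model-uuid>",
        #         portfolio_id="<portfolio-uuid>",
        #         download_filepath="portfolio_data.xlsx",
        #     )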
4700
4701    def diff_hedge_experiment_portfolio_data(
4702        self,
4703        hedge_experiment_id: str,
4704        comparison_portfolios: List[str],
4705        categories: List[str],
4706    ) -> Dict:
4707        qry = """
4708        query diffHedgeExperimentPortfolios(
4709            $input: DiffHedgeExperimentPortfoliosInput!
4710        ) {
4711            diffHedgeExperimentPortfolios(input: $input) {
4712            data {
4713                diffs {
4714                    volatility {
4715                        date
4716                        vol5D
4717                        vol10D
4718                        vol21D
4720                        vol63D
4721                        vol126D
4722                        vol189D
4723                        vol252D
4724                        vol315D
4725                        vol378D
4726                        vol441D
4727                        vol504D
4728                    }
4729                    performance {
4730                        date
4731                        value
4732                    }
4733                    performanceGrid {
4734                        headerRow
4735                        values
4736                    }
4737                    factors {
4738                        date
4739                        momentum
4740                        growth
4741                        size
4742                        value
4743                        dividendYield
4744                        volatility
4745                    }
4746                }
4747            }
4748            errors
4749            }
4750        }
4751        """
4752        headers = {"Authorization": "ApiKey " + self.api_key}
4753        params = {
4754            "hedgeExperimentId": hedge_experiment_id,
4755            "portfolioIds": comparison_portfolios,
4756            "categories": categories,
4757        }
4758        resp = requests.post(
4759            f"{self.base_uri}/api/graphql",
4760            json={"query": qry, "variables": params},
4761            headers=headers,
4762            params=self._request_params,
4763        )
4764
4765        json_resp = resp.json()
4766
4767        # graphql endpoints typically return 200 or 400 status codes, so we must
4768        # check if we have any errors, even with a 200
4769        if not resp.ok or "errors" in json_resp:
4770            error_msg = self._try_extract_error_code(resp)
4771            logger.error(error_msg)
4772            raise BoostedAPIException(
4773                (
4774                    f"Failed to get portfolio diffs for {hedge_experiment_id=}: "
4775                    f"{resp.status_code=}; {error_msg=}"
4776                )
4777            )
4778
4779        diffs = json_resp["data"]["diffHedgeExperimentPortfolios"]["data"]["diffs"]
4780        comparisons = {}
4781        for pf, cmp in zip(comparison_portfolios, diffs):
4782            res: Dict[str, Any] = {
4783                "performance": None,
4784                "performanceGrid": None,
4785                "factors": None,
4786                "volatility": None,
4787            }
4788            if "performanceGrid" in cmp:
4789                grid = cmp["performanceGrid"]
4790                grid_df = pd.DataFrame(grid["values"], columns=grid["headerRow"])
4791                res["performanceGrid"] = grid_df
4792            if "performance" in cmp:
4793                perf_df = pd.DataFrame(cmp["performance"]).set_index("date")
4794                perf_df.index = pd.to_datetime(perf_df.index)
4795                res["performance"] = perf_df
4796            if "volatility" in cmp:
4797                vol_df = pd.DataFrame(cmp["volatility"]).set_index("date")
4798                vol_df.index = pd.to_datetime(vol_df.index)
4799                res["volatility"] = vol_df
4800            if "factors" in cmp:
4801                factors_df = pd.DataFrame(cmp["factors"]).set_index("date")
4802                factors_df.index = pd.to_datetime(factors_df.index)
4803                res["factors"] = factors_df
4804            comparisons[pf] = res
4805        return comparisons
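
        # Usage sketch (illustrative; the category names shown are assumptions, not
        # a confirmed list of valid inputs):
        #
        #     diffs = client.diff_hedge_experiment_portfolio_data(
        #         hedge_experiment_id="<experiment-uuid>",
        #         comparison_portfolios=["<portfolio-uuid>"],
        #         categories=["performance", "volatility"],
        #     )
        #     perf_df = diffs["<portfolio-uuid>"]["performance"]  # date-indexed DataFrame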
4806
4807    def get_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4808        url = self.base_uri + f"/api/analysis/signal_strength/{model_id}/{portfolio_id}"
4809        headers = {"Authorization": "ApiKey " + self.api_key}
4810
4811        logger.info(f"Retrieving portfolio signals for {model_id=}, {portfolio_id=}")
4812
4813        # Response format is a json object with a "header_row" key for column
4814        # names, and then a nested list of data.
4815        resp = requests.get(url, headers=headers, **self._request_params)
4816        self._check_ok_or_err_with_msg(
4817            resp, f"Failed to get portfolio signals for {model_id=}, {portfolio_id=}"
4818        )
4819
4820        data = resp.json()
4821
4822        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4823        df["Date"] = pd.to_datetime(df["Date"])
4824        df = df.set_index("Date")
4825        return df.astype(float)
4826
4827    def get_rolling_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4828        url = self.base_uri + f"/api/analysis/signal_strength_rolling/{model_id}/{portfolio_id}"
4829        headers = {"Authorization": "ApiKey " + self.api_key}
4830
4831        logger.info(f"Retrieving rolling portfolio signals for {model_id=}, {portfolio_id=}")
4832
4833        # Response format is a json object with a "header_row" key for column
4834        # names, and then a nested list of data.
4835        resp = requests.get(url, headers=headers, **self._request_params)
4836        self._check_ok_or_err_with_msg(
4837            resp, f"Failed to get rolling portfolio signals for {model_id=}, {portfolio_id=}"
4838        )
4839
4840        data = resp.json()
4841
4842        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4843        df["Date"] = pd.to_datetime(df["Date"])
4844        df = df.set_index("Date")
4845        return df.astype(float)
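
        # Usage sketch (illustrative): both signal-strength helpers return a
        # float-typed DataFrame indexed by date.
        #
        #     rolling = client.get_rolling_signal_strength("<model-uuid>", "<portfolio-uuid>")
        #     rolling.tail()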
4846
4847    def get_portfolio_quantiles(
4848        self,
4849        model_id: str,
4850        portfolio_id: str,
4851        id_type: Literal["TICKER", "ISIN"] = "TICKER",
4852    ) -> pd.DataFrame:
4853        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4854        date = datetime.date.today().strftime("%Y-%m-%d")
4855
4856        payload = {
4857            "model_id": model_id,
4858            "portfolio_id": portfolio_id,
4859            "fields": ["quantile"],
4860            "min_date": date,
4861            "max_date": date,
4862            "return_format": "json",
4863        }
4864        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
4865        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
4866
4867        res: requests.Response = requests.post(
4868            url, json=payload, headers=headers, **self._request_params
4869        )
4870        self._check_ok_or_err_with_msg(res, "Unable to get quantile data")
4871
4872        resp: Dict = res.json()
4873        quantile_index = resp["field_map"]["Quantile"]
4874        quantile_data = [[c[quantile_index] for c in r] for r in resp["data"]]
4875        date_cols = pd.to_datetime(resp["columns"])
4876
4877        # Need to map gbi id's to isins or tickers
4878        gbi_ids = [int(i) for i in resp["rows"]]
4879        security_info = self._get_security_info(gbi_ids)
4880
4881        # We now have security data, go through and create a map from internal
4882        # gbi id to client facing identifier
4883        id_key = "isin" if id_type == "ISIN" else "symbol"
4884        gbi_identifier_map = {
4885            sec["gbiId"]: sec[id_key] for sec in security_info["data"]["securities"]
4886        }
4887
4888        df = pd.DataFrame(quantile_data, index=gbi_ids, columns=date_cols).transpose()
4889        df = df.rename(columns=gbi_identifier_map)
4890        return df
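
        # Usage sketch (illustrative): the result is indexed by date, with one
        # column per security identifier.
        #
        #     quantiles = client.get_portfolio_quantiles(
        #         model_id="<model-uuid>",
        #         portfolio_id="<portfolio-uuid>",
        #         id_type="TICKER",
        #     )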
4891
4892    def get_similar_stocks(
4893        self,
4894        model_id: str,
4895        portfolio_id: str,
4896        symbol_list: List[str],
4897        date: BoostedDate,
4898        identifier_type: Literal["TICKER", "ISIN"],
4899        preferred_country: Optional[str] = None,
4900        preferred_currency: Optional[str] = None,
4901    ) -> pd.DataFrame:
4902        date_str = convert_date(date).strftime("%Y-%m-%d")
4903
4904        sec_data = self.getGbiIdFromIdentCountryCurrencyDate(
4905            ident_country_currency_dates=[
4906                DateIdentCountryCurrency(
4907                    date=datetime.date.today().isoformat(),
4908                    identifier=s,
4909                    id_type=(
4910                        ColumnSubRole.SYMBOL if identifier_type == "TICKER" else ColumnSubRole.ISIN
4911                    ),
4912                    country=preferred_country,
4913                    currency=preferred_currency,
4914                )
4915                for s in symbol_list
4916            ]
4917        )
4918
4919        gbi_id_ident_map: Dict[int, str] = {}
4920        for sec in sec_data:
4921            ident = sec.ticker if identifier_type == "TICKER" else sec.isin_info.identifier
4922            gbi_id_ident_map[sec.gbi_id] = ident
4923        gbi_ids = list(gbi_id_ident_map.keys())
4924
4925        qry = """
4926          query GetSimilarStocks(
4927            $modelId: ID!
4928            $portfolioId: ID!
4929            $gbiIds: [Int]!
4930            $startDate: String!
4931            $endDate: String!
4932            $includeCorrelation: Boolean
4933          ) {
4934            similarStocks(
4935              modelId: $modelId,
4936              portfolioId: $portfolioId,
4937              gbiIds: $gbiIds,
4938              startDate: $startDate,
4939              endDate: $endDate,
4940              includeCorrelation: $includeCorrelation
4941            ) {
4942              gbiId
4943              overallSimilarityScore
4944              priceSimilarityScore
4945              factorSimilarityScore
4946              correlation
4947            }
4948          }
4949        """
4950        variables = {
4951            "startDate": date_str,
4952            "endDate": date_str,
4953            "modelId": model_id,
4954            "portfolioId": portfolio_id,
4955            "gbiIds": gbi_ids,
4956            "includeCorrelation": True,
4957        }
4958
4959        resp = self._get_graphql(
4960            qry, variables=variables, error_msg_prefix="Failed to get similar stocks result: "
4961        )
4962        df = pd.DataFrame(resp["data"]["similarStocks"])
4963
4964        # Now that we have the rest of the securities in the portfolio, we need
4965        # to map them back to the correct identifiers
4966        all_gbi_ids = df["gbiId"].tolist()
4967        sec_info = self._get_security_info(all_gbi_ids)
4968        for s in sec_info["data"]["securities"]:
4969            ident = s["symbol"] if identifier_type == "TICKER" else s["isin"]
4970            gbi_id_ident_map[s["gbiId"]] = ident
4971        df["identifier"] = df["gbiId"].map(gbi_id_ident_map)
4972        df = df.set_index("identifier")
4973        return df.drop("gbiId", axis=1)
4974
4975    def get_portfolio_trades(
4976        self,
4977        model_id: str,
4978        portfolio_id: str,
4979        start_date: Optional[BoostedDate] = None,
4980        end_date: Optional[BoostedDate] = None,
4981    ) -> pd.DataFrame:
4982        if not end_date:
4983            end_date = datetime.date.today()
4984        end_date = convert_date(end_date)
4985
4986        if not start_date:
4987            # default to a year of data
4988            start_date = end_date - datetime.timedelta(days=365)
4989        start_date = convert_date(start_date)
4990
4991        start_date_str = start_date.strftime("%Y-%m-%d")
4992        end_date_str = end_date.strftime("%Y-%m-%d")
4993
4994        if end_date - start_date > datetime.timedelta(days=365 * 7):
4995            raise BoostedAPIException(
4996                f"Date range ({start_date_str}, {end_date_str}) too large, max 7 years"
4997            )
4998
4999        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
5000        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5001        payload = {
5002            "model_id": model_id,
5003            "portfolio_id": portfolio_id,
5004            "fields": ["price", "shares_traded", "shares_owned"],
5005            "min_date": start_date_str,
5006            "max_date": end_date_str,
5007            "return_format": "json",
5008        }
5009
5010        res: requests.Response = requests.post(
5011            url, json=payload, headers=headers, **self._request_params
5012        )
5013        self._check_ok_or_err_with_msg(res, "Unable to get portfolio trades data")
5014
5015        data = res.json()
5016        gbi_ids = [int(ident) for ident in data["rows"]]
5017
5018        # need both isin and ticker to distinguish between possible duplicates
5019        sec_info = self._get_security_info(gbi_ids)["data"]["securities"]
5020        isin_map = {str(s["gbiId"]): s["isin"] for s in sec_info}
5021        ticker_map = {str(s["gbiId"]): s["symbol"] for s in sec_info}
5027
5028        # construct individual dataframes for each security, then join them together
5029        dfs: List[pd.DataFrame] = []
5030        full_data = data["data"]
5031        for i, gbi_id in enumerate(data["rows"]):
5032            df = pd.DataFrame(
5033                index=pd.to_datetime(data["columns"]), columns=data["fields"], data=full_data[i]
5034            )
5035            # drop rows where no shares are owned or traded
5036            df.drop(
5037                df.loc[((df["shares_owned"] == 0.0) & (df["shares_traded"] == 0.0))].index,
5038                inplace=True,
5039            )
5040            df["isin"] = isin_map[gbi_id]
5041            df["ticker"] = ticker_map[gbi_id]
5042            dfs.append(df)
5043
5044        full_df = pd.concat(dfs)
5045        full_df["date"] = full_df.index
5046        full_df.sort_index(inplace=True)
5047        full_df.reset_index(drop=True, inplace=True)
5048
5049        # reorder the columns to match the spreadsheet
5050        columns = ["isin", "ticker", "date", *data["fields"]]
5051        return full_df[columns]
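
        # Usage sketch (illustrative; omitting dates defaults to the trailing year):
        #
        #     trades = client.get_portfolio_trades(
        #         model_id="<model-uuid>",
        #         portfolio_id="<portfolio-uuid>",
        #     )
        #     # columns: isin, ticker, date, price, shares_traded, shares_owned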
5052
5053    def get_ideas(
5054        self,
5055        model_id: str,
5056        portfolio_id: str,
5057        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5058        delta_horizon: str = "1M",
5059    ) -> pd.DataFrame:
5060        if investment_horizon not in ("1M", "3M", "1Y"):
5061            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5062
5063        if delta_horizon not in ("1W", "1M", "3M", "6M", "9M", "1Y"):
5064            raise BoostedAPIException(f"Invalid delta horizon: {delta_horizon}")
5065
5066        # First compute dates based on the delta horizon. "0D" is the latest rebalance.
5067        try:
5068            dates = self._get_portfolio_rebalance_from_periods(
5069                portfolio_id=portfolio_id, rel_periods=["0D", delta_horizon]
5070            )
5071        except Exception:
5072            raise BoostedAPIException(
5073                f"Portfolio {portfolio_id} does not exist or you do not have permission to view it."
5074            )
5075        end_date = dates[0].strftime("%Y-%m-%d")
5076        start_date = dates[1].strftime("%Y-%m-%d")
5077
5078        resp = self._get_graphql(
5079            graphql_queries.GET_IDEAS_QUERY,
5080            variables={
5081                "modelId": model_id,
5082                "portfolioId": portfolio_id,
5083                "horizon": investment_horizon,
5084                "deltaHorizon": delta_horizon,
5085                "startDate": start_date,
5086                "endDate": end_date,
5087                # Note: market data date is needed to fetch market cap.
5088                # we don't fetch that data from this endpoint so we stub
5089                # out the mandatory parameter with the end date requested
5090                "marketDataDate": end_date,
5091            },
5092            error_msg_prefix="Failed to get ideas: ",
5093        )
5094        # rows is a list of dicts like:
5095        # {
5096        #   "category": "Strong Sell",
5097        #   "dividendYield": 0.0,
5098        #   "reason": "Boosted Insights has given this stock...",
5099        #   "rating": 0.458167,
5100        #   "ratingDelta": 0.438087,
5101        #   "risk": {
5102        #     "text": "high"
5103        #   },
5104        #   "security": {
5105        #     "symbol": "BA"
5106        #   }
5107        # }
5108        try:
5109            rows = resp["data"]["recommendations"]["recommendations"]
5110            data = [
5111                {
5112                    "symbol": r["security"]["symbol"],
5113                    "recommendation": r["category"],
5114                    "rating": r["rating"],
5115                    "rating_delta": r["ratingDelta"],
5116                    "dividend_yield": r["dividendYield"],
5117                    "predicted_excess_return_1m": r["ER"]["oneMonth"],
5118                    "predicted_excess_return_3m": r["ER"]["threeMonth"],
5119                    "predicted_excess_return_1y": r["ER"]["oneYear"],
5120                    "risk": r["risk"]["text"],
5121                    "reward": r["reward"]["text"],
5122                    "reason": r["reason"],
5123                }
5124                for r in rows
5125            ]
5126            df = pd.DataFrame(data)
5127            df.set_index("symbol", inplace=True)
5128        except Exception:
5129            # Don't show old exception info to client
5130            raise BoostedAPIException(
5131                "No recommendations found, try selecting another horizon."
5132            ) from None
5133
5134        return df
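
        # Usage sketch (illustrative):
        #
        #     ideas = client.get_ideas(
        #         model_id="<model-uuid>",
        #         portfolio_id="<portfolio-uuid>",
        #         investment_horizon="1M",
        #         delta_horizon="1M",
        #     )
        #     # DataFrame indexed by symbol with recommendation, rating, rating_delta,
        #     # dividend_yield, predicted excess returns, risk, reward, and reason.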
5135
5136    def get_stock_recommendations(
5137        self,
5138        model_id: str,
5139        portfolio_id: str,
5140        symbols: Optional[List[str]] = None,
5141        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5142    ) -> pd.DataFrame:
5143        model_stocks = self._get_model_stocks(model_id)
5144
5145        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5146        gbi_ids_to_symbols = {s.gbi_id: s.ticker for s in model_stocks}
5147
5148        variables: Dict[str, Any] = {
5149            "strategyId": portfolio_id,
5150        }
5151        if symbols:
5152            variables["gbiIds"] = [
5153                symbols_to_gbiids.get(symbol) for symbol in symbols if symbols_to_gbiids.get(symbol)
5154            ]
5155        try:
5156            recs = self._get_graphql(
5157                graphql_queries.MULTI_STOCK_RECOMMENDATION_QUERY,
5158                variables=variables,
5159                log_error=False,
5160            )["data"]["currentRecommendationsFull"]
5161        except BoostedAPIException:
5162            raise BoostedAPIException(f"Error getting recommendations for strategy {portfolio_id}")
5163
5164        data = []
5165        recommendation_key = f"recommendation{investment_horizon}"
5166        for rec in recs:
5167            # Keys to rec are:
5168            # ['ER', 'rewardCategories', 'riskCategories', 'reasons',
5169            #  'recommendation', 'rewardCategory', 'riskCategory']
5170            # need to flatten these out and add to a DF
5171            rec_data = rec[recommendation_key]
5172            reasons_dict = {r["type"]: r["text"] for r in rec_data["reasons"]}
5173            row = {
5174                "symbol": gbi_ids_to_symbols[rec["gbiId"]],
5175                "recommendation": rec_data["currentCategory"],
5176                "predicted_excess_return_1m": rec_data["ER"]["oneMonth"],
5177                "predicted_excess_return_3m": rec_data["ER"]["threeMonth"],
5178                "predicted_excess_return_1y": rec_data["ER"]["oneYear"],
5179                "risk": rec_data["risk"]["text"],
5180                "reward": rec_data["reward"]["text"],
5181                "reasons": reasons_dict,
5182            }
5183
5184            data.append(row)
5185        df = pd.DataFrame(data)
5186        df.set_index("symbol", inplace=True)
5187        return df
5188
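For illustration, a minimal usage sketch (not part of the library source): the model and portfolio IDs below are placeholders, and a valid API key is assumed.

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="YOUR_API_KEY")
    recs = client.get_stock_recommendations(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        symbols=["AAPL", "MSFT"],
        investment_horizon="3M",
    )
    # DataFrame indexed by symbol, with recommendation/risk/reward columns
    print(recs.loc["AAPL", ["recommendation", "predicted_excess_return_3m"]])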
5189    # NOTE: this could be easily expanded to the entire stockRecommendation
5190    # entity, but that only includes all horizons' excess returns and risk/reward
5191    # which we already get from getIdeas
5192    def get_stock_recommendation_reasons(
5193        self,
5194        model_id: str,
5195        portfolio_id: str,
5196        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5197        symbols: Optional[List[str]] = None,
5198    ) -> Dict[str, Optional[List[str]]]:
5199        if investment_horizon not in ("1M", "3M", "1Y"):
5200            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5201
5202        # "0D" is the latest rebalance - its all we have in terms of recs
5203        dates = self._get_portfolio_rebalance_from_periods(
5204            portfolio_id=portfolio_id, rel_periods=["0D"]
5205        )
5206        date = dates[0].strftime("%Y-%m-%d")
5207
5208        model_stocks = self._get_model_stocks(model_id)
5209
5210        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5211        if symbols is None:  # potentially iterate through all holdings
5212            symbols = list(symbols_to_gbiids.keys())
5213
5214        reasons: Dict[str, Optional[List[str]]] = {}
5215        for sym in symbols:
5216            # it's possible that a passed symbol was not actually a portfolio holding
5217            try:
5218                gbi_id = symbols_to_gbiids[sym]
5219            except KeyError:
5220                logger.warning(f"Symbol={sym} not found in universe on {date=}")
5221                reasons[sym] = None
5222                continue
5223
5224            try:
5225                recs = self._get_graphql(
5226                    graphql_queries.STOCK_RECOMMENDATION_QUERY,
5227                    variables={
5228                        "modelId": model_id,
5229                        "portfolioId": portfolio_id,
5230                        "horizon": investment_horizon,
5231                        "gbiId": gbi_id,
5232                        "date": date,
5233                    },
5234                    log_error=False,
5235                )
5236                reasons[sym] = [
5237                    reason["text"] for reason in recs["data"]["stockRecommendation"]["reasons"]
5238                ]
5239            except BoostedAPIException:
5240                logger.warning(f"No recommendation for: {sym}, skipping...")
5241        return reasons
5242
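A minimal sketch of fetching reasons for specific holdings, assuming the `client` instance from the sketch above; symbols that are not in the model universe map to None in the result.

    reasons = client.get_stock_recommendation_reasons(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        investment_horizon="1M",
        symbols=["AAPL", "NOT_A_HOLDING"],
    )
    # e.g. {"AAPL": ["...reason text..."], "NOT_A_HOLDING": None}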
5243    def get_stock_mapping_alternatives(
5244        self,
5245        isin: Optional[str] = None,
5246        symbol: Optional[str] = None,
5247        country: Optional[str] = None,
5248        currency: Optional[str] = None,
5249        asof_date: Optional[BoostedDate] = None,
5250    ) -> Dict:
5251        """
5252        Return the stock mapping for the given criteria,
5253        also suggestions for alternate matches,
5254        if the mapping is not what is wanted
5255
5256
5257            Parameters [One of either ISIN or SYMBOL must be provided]
5258            ----------
5259            isin: Optional[str]
5260                search by ISIN
5261            symbol: Optional[str]
5262                search by Ticker Symbol
5263            country: Optional[str]
5264                Additionally filter by country code - ex: None, "ANY", "p_USA", "CAN"
5265            currency: Optional[str]
5266                Additionally filter by currency code - ex: None, "ANY", "p_USD", "CAD"
5267            asof_date: Optional[date]
5268                as of which date to perform the search, default is today()
5269
5270            Note: country/currency filter starting with "p_" indicates
5271                  only a soft preference but allows other matches
5272
5273        Returns
5274        -------
5275        Dictionary Representing this 'MapSecurityResponse' structure:
5276
5277        class MapSecurityResponse():
5278            stock_mapping: Optional[SecurityInfo]
5279               The mapping we would perform given your inputs
5280
5281            alternatives: Optional[List[SecurityInfo]]
5282               Alternative suggestions based on your input
5283
5284            error: Optional[str]
5285
5286        class SecurityInfo():
5287            gbi_id: int
5288            isin: str
5289            symbol: Optional[str]
5290            country: str
5291            currency: str
5292            name: str
5293            from_date: date
5294            to_date: date
5295            is_primary_trading_item: bool
5296
5297        """
5298
5299        url = f"{self.base_uri}/api/stock-mapping/alternatives"
5300        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5301        req_json: Dict = {
5302            "isin": isin,
5303            "symbol": symbol,
5304            "countryPreference": country,
5305            "currencyPreference": currency,
5306        }
5307
5308        if asof_date:
5309            req_json["date"] = convert_date(asof_date).isoformat()
5310
5311        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
5312
5313        if not res.ok:
5314            error_msg = self._try_extract_error_code(res)
5315            logger.error(error_msg)
5316            raise BoostedAPIException(f"Failed to get stock mapping alternatives: {error_msg}")
5317
5318        data = res.json()
5319        return data
5320
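A sketch of resolving an ambiguous ticker, assuming a BoostedClient instance `client`; per the docstring above, a "p_" prefix expresses a soft country/currency preference.

    mapping = client.get_stock_mapping_alternatives(
        symbol="ABC", country="p_USA", currency="p_USD"
    )
    print(mapping["stock_mapping"]["isin"])
    for alt in mapping["alternatives"] or []:
        print(alt["isin"], alt["country"], alt["name"])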
5321    def get_pros_cons_for_stocks(
5322        self,
5323        model_id: Optional[str] = None,
5324        symbols: Optional[List[str]] = None,
5325        preferred_country: Optional[str] = None,
5326        preferred_currency: Optional[str] = None,
5327    ) -> Dict[str, Dict[str, List]]:
5328        if symbols:
5329            ident_objs = [
5330                DateIdentCountryCurrency(
5331                    date=datetime.date.today().strftime("%Y-%m-%d"),
5332                    identifier=symbol,
5333                    country=preferred_country,
5334                    currency=preferred_currency,
5335                    id_type=ColumnSubRole.SYMBOL,
5336                )
5337                for symbol in symbols
5338            ]
5339            sec_objs = self.getGbiIdFromIdentCountryCurrencyDate(
5340                ident_country_currency_dates=ident_objs
5341            )
5342            gbi_id_ticker_map = {sec.gbi_id: sec.ticker for sec in sec_objs if sec}
5343        elif model_id:
5344            stocks = self._get_model_stocks(model_id=model_id)
5345            gbi_id_ticker_map = {sec.gbi_id: sec.ticker for sec in stocks}
5346        else:
5347            raise BoostedAPIException("Either symbols or model_id must be provided")
5348        gbi_ids = list(gbi_id_ticker_map.keys())
5349        data = self._get_graphql(
5350            query=graphql_queries.GET_PROS_CONS_QUERY,
5351            variables={"gbiIds": gbi_ids},
5352            error_msg_prefix="Failed to get pros/cons:",
5353        )
5354        gbi_id_pros_cons_map = {
5355            row["gbiId"]: {"pros": row["pros"], "cons": row["cons"]}
5356            for row in data["data"]["bulkSecurityProsCons"]
5357        }
5358
5359        return {
5360            gbi_id_ticker_map[gbi_id]: pros_cons
5361            for gbi_id, pros_cons in gbi_id_pros_cons_map.items()
5362        }
5363
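A sketch, assuming a BoostedClient instance `client`; the result maps each resolved ticker to its "pros" and "cons" lists.

    pros_cons = client.get_pros_cons_for_stocks(symbols=["AAPL", "MSFT"])
    for ticker, pc in pros_cons.items():
        print(ticker, pc["pros"], pc["cons"])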
5364    def generate_theme(self, theme_name: str, stock_universes: List[ThemeUniverse]) -> str:
5365        # First get universe name and id mappings
5366        try:
5367            resp = self._get_graphql(
5368                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5369            )
5370            data = resp["data"]["getMarketTrendsUniverses"]
5371        except Exception:
5372            raise BoostedAPIException("Failed to load market trends universes mapping")
5373
5374        universe_name_to_id = {u["name"]: u["id"] for u in data}
5375        universe_ids = [universe_name_to_id[u.value] for u in stock_universes]
5376        try:
5377            resp = self._get_graphql(
5378                query=graphql_queries.GENERATE_THEME_QUERY,
5379                variables={"input": {"themeName": theme_name, "universeIds": universe_ids}},
5380            )
5381            data = resp["data"]["generateTheme"]
5382        except Exception:
5383            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5384
5385        if not data["success"]:
5386            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5387
5388        logger.info(
5389            f"Successfully generated theme: {theme_name}. The theme ID is {data['themeId']}"
5390        )
5391        return data["themeId"]
5392
5393    def _get_stock_universe_id(self, universe: ThemeUniverse) -> str:
5394        try:
5395            resp = self._get_graphql(
5396                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5397            )
5398            data = resp["data"]["getMarketTrendsUniverses"]
5399        except Exception:
5400            raise BoostedAPIException("Failed to load market trends universes mapping")
5401
5402        for u in data:
5403            if u["name"] == universe.value:
5404                universe_id = u["id"]
5405                return universe_id
5406
5407        raise BoostedAPIException(f"Failed to find universe: {universe.value}")
5408
5409    def get_themes_for_stock_universe(
5410        self,
5411        stock_universe: ThemeUniverse,
5412        start_date: Optional[BoostedDate] = None,
5413        end_date: Optional[BoostedDate] = None,
5414        language: Optional[Union[str, Language]] = None,
5415    ) -> List[Dict]:
5416        """Get all themes data for a particular stock universe
5417        (start_date, end_date) are used to calculate the theme importance for ranking purpose. If
5418        None, default to past 30 days
5419        Returns: A list of below dictionaries
5420        {
5421            themeId: str
5422            themeName: str
5423            themeImportance: float
5424            volatility: float
5425            positiveStockPerformance: float
5426            negativeStockPerformance: float
5427        }
5428        """
5429        translate = functools.partial(self.translate_text, language)
5430        # First get universe name and id mappings
5431        universe_id = self._get_stock_universe_id(stock_universe)
5432
5433        start_date_iso, end_date_iso = get_valid_iso_dates(start_date, end_date)
5434
5435        try:
5436            resp = self._get_graphql(
5437                query=graphql_queries.GET_THEMES,
5438                variables={
5439                    "type": "UNIVERSE",
5440                    "id": universe_id,
5441                    "startDate": start_date_iso,
5442                    "endDate": end_date_iso,
5443                    "deltaHorizon": "",  # not needed here
5444                },
5445            )
5446            data = resp["data"]["themes"]
5447        except Exception:
5448            raise BoostedAPIException(
5449                f"Failed to get themes for stock universe: {stock_universe.name}"
5450            )
5451
5452        for theme_data in data:
5453            theme_data["themeName"] = translate(theme_data["themeName"])
5454        return data
5455
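A sketch of ranking themes for a universe, assuming a BoostedClient instance `client` and a ThemeUniverse member `universe` (the enum lives in boosted.api.api_type); the date strings are placeholders.

    themes = client.get_themes_for_stock_universe(
        stock_universe=universe, start_date="2023-01-01", end_date="2023-01-31"
    )
    top = max(themes, key=lambda t: t["themeImportance"])
    print(top["themeName"], top["themeImportance"])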
5456    def get_themes_for_stock(
5457        self,
5458        isin: str,
5459        currency: Optional[str] = None,
5460        country: Optional[str] = None,
5461        start_date: Optional[BoostedDate] = None,
5462        end_date: Optional[BoostedDate] = None,
5463        language: Optional[Union[str, Language]] = None,
5464    ) -> List[Dict]:
5465        """Get all themes data for a particular stock
5466        (ISIN, currency, country) compose a unique identifier for a stock for us to map to GBI ID
5467        (start_date, end_date) are used to calculate the theme importance for ranking purpose. If
5468        None, default to past 30 days
5469
5470        Returns
5471        A list of below dictionaries
5472        {
5473            themeId: str
5474            themeName: str
5475            importanceScore: float
5476            similarityScore: float
5477            positiveThemeRelation: bool
5478            reason: String
5479        }
5480        """
5481        translate = functools.partial(self.translate_text, language)
5482        security_info = self.get_stock_mapping_alternatives(
5483            isin, country=country, currency=currency
5484        )
5485        gbi_id = security_info["stock_mapping"]["gbi_id"]
5486
5487        if (start_date and not end_date) or (end_date and not start_date):
5488            raise BoostedAPIException("Must provide both start and end dates or neither")
5489        elif not end_date and not start_date:
5490            end_date = datetime.date.today()
5491            start_date = end_date - datetime.timedelta(days=30)
5492            end_date = end_date.isoformat()
5493            start_date = start_date.isoformat()
5494        else:
5495            if isinstance(start_date, datetime.date):
5496                start_date = start_date.isoformat()
5497            if isinstance(end_date, datetime.date):
5498                end_date = end_date.isoformat()
5499
5500        try:
5501            resp = self._get_graphql(
5502                query=graphql_queries.GET_THEMES_FOR_STOCK_WITH_REASONS,
5503                variables={"gbiId": gbi_id, "startDate": start_date, "endDate": end_date},
5504            )
5505            data = resp["data"]["themesForStockWithReasons"]
5506        except Exception:
5507            raise BoostedAPIException(f"Failed to get themes for stock: {isin}")
5508
5509        for item in data:
5510            item["themeName"] = translate(item["themeName"])
5511            item["reason"] = translate(item["reason"])
5512        return data
5513
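A sketch for a single stock, assuming a BoostedClient instance `client`; omitting the dates defaults to the past 30 days, and the ISIN below is Apple's.

    themes = client.get_themes_for_stock(isin="US0378331005")
    for t in themes:
        print(t["themeName"], t["positiveThemeRelation"], t["reason"])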
5514    def get_stock_news(
5515        self,
5516        time_horizon: NewsHorizon,
5517        isin: str,
5518        currency: Optional[str] = None,
5519        country: Optional[str] = None,
5520        language: Optional[Union[str, Language]] = None,
5521    ) -> Dict:
5522        """
5523        The API to get a stock's news summary for a given time horizon, the topics summarized by
5524        these news and the corresponding news to these topics
5525        Returns
5526        -------
5527        A nested dictionary in the following format:
5528        {
5529            summary: str
5530            topics: [
5531                {
5532                    topicId: str
5533                    topicLabel: str
5534                    topicDescription: str
5535                    topicPolarity: str
5536                    newsItems: [
5537                        {
5538                            newsId: str
5539                            headline: str
5540                            url: str
5541                            summary: str
5542                            source: str
5543                            publishedAt: str
5544                        }
5545                    ]
5546                }
5547            ]
5548            other_news_count: int
5549        }
5550        """
5551        translate = functools.partial(self.translate_text, language)
5552        security_info = self.get_stock_mapping_alternatives(
5553            isin, country=country, currency=currency
5554        )
5555        gbi_id = security_info["stock_mapping"]["gbi_id"]
5556
5557        try:
5558            resp = self._get_graphql(
5559                query=graphql_queries.GET_STOCK_NEWS_QUERY,
5560                variables={"gbiId": gbi_id, "deltaHorizon": time_horizon.value},
5561            )
5562            data = resp["data"]
5563        except Exception:
5564            raise BoostedAPIException(f"Failed to get news for stock: {isin}")
5565
5566        outputs: Dict[str, Any] = {}
5567        outputs["summary"] = translate(data["getStockNewsSummary"]["summary"])
5568        # Return the top 10 topics
5569        outputs["topics"] = data["getStockNewsTopics"]["topics"][:10]
5570
5571        for topic in outputs["topics"]:
5572            topic["topicLabel"] = translate(topic["topicLabel"])
5573            topic["topicDescription"] = translate(topic["topicDescription"])
5574
5575        other_news_count = 0
5576        for source_count in data["getStockNewsSummary"]["sourceCounts"]:
5577            other_news_count += source_count["count"]
5578
5579        for topic in outputs["topics"]:
5580            other_news_count -= len(topic["newsItems"])
5581
5582        outputs["other_news_count"] = other_news_count
5583
5584        return outputs
5585
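A sketch, assuming a BoostedClient instance `client` and a NewsHorizon member `horizon` (the enum lives in boosted.api.api_type).

    news = client.get_stock_news(time_horizon=horizon, isin="US0378331005")
    print(news["summary"])
    for topic in news["topics"]:
        print(topic["topicLabel"], len(topic["newsItems"]))
    print(news["other_news_count"], "other news items")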
5586    def get_theme_details(
5587        self,
5588        theme_id: str,
5589        universe: ThemeUniverse,
5590        language: Optional[Union[str, Language]] = None,
5591    ) -> Dict[str, Any]:
5592        translate = functools.partial(self.translate_text, language)
5593        universe_id = self._get_stock_universe_id(universe)
5594        date = datetime.date.today()
5595        prev_date = date - datetime.timedelta(days=30)
5596        result = self._get_graphql(
5597            query=graphql_queries.GET_THEME_DEEPDIVE_DETAILS,
5598            variables={
5599                "deltaHorizon": "1W",
5600                "startDate": prev_date.strftime("%Y-%m-%d"),
5601                "endDate": date.strftime("%Y-%m-%d"),
5602                "id": universe_id,
5603                "themeId": theme_id,
5604                "type": "UNIVERSE",
5605            },
5606            error_msg_prefix="Failed to get theme details",
5607        )["data"]["marketThemes"]
5608
5609        gbi_id_stock_data_map: Dict[int, Dict] = {}
5610
5611        stocks = []
5612        for stock_info in result["stockInfos"]:
5613            gbi_id_stock_data_map[stock_info["gbiId"]] = stock_info["security"]
5614            stocks.append(
5615                {
5616                    "isin": stock_info["security"]["isin"],
5617                    "name": stock_info["security"]["name"],
5618                    "reason": translate(stock_info["polarityReasonScores"]["reason"]),
5619                    "positive_theme_relation": stock_info["polarityReasonScores"][
5620                        "positiveThemeRelation"
5621                    ],
5622                    "theme_stock_impact_score": stock_info["polarityReasonScores"][
5623                        "similarityScore"
5624                    ],
5625                }
5626            )
5627
5628        impacts = []
5629        for impact in result["impactInfos"]:
5630            articles = [
5631                {
5632                    "title": newsitem["headline"],
5633                    "url": newsitem["url"],
5634                    "source": newsitem["source"],
5635                    "publish_date": newsitem["publishedAt"],
5636                }
5637                for newsitem in impact["newsItems"]
5638            ]
5639
5640            impact_stocks = []
5641            for impact_stock_data in impact["stocks"]:
5642                stock_metadata = gbi_id_stock_data_map[impact_stock_data["gbiId"]]
5643                impact_stocks.append(
5644                    {
5645                        "isin": stock_metadata["isin"],
5646                        "name": stock_metadata["name"],
5647                        "positive_impact_relation": impact_stock_data["positiveThemeRelation"],
5648                    }
5649                )
5650
5651            impact_dict = {
5652                "impact_name": translate(impact["impactName"]),
5653                "impact_description": translate(impact["impactDescription"]),
5654                "impact_score": impact["impactScore"],
5655                "articles": articles,
5656                "impact_stocks": impact_stocks,
5657            }
5658            impacts.append(impact_dict)
5659
5660        developments = []
5661        for dev in result["themeDevelopments"]:
5662            developments.append(
5663                {
5664                    "name": dev["label"],
5665                    "article_count": dev["articleCount"],
5666                    "date": parser.parse(dev["date"]).date(),
5667                    "description": dev["description"],
5668                    "is_major_development": dev["isMajorDevelopment"],
5669                    "sentiment": dev["sentiment"],
5670                    "news": [
5671                        {
5672                            "headline": entry["headline"],
5673                            "published_at": parser.parse(entry["publishedAt"]),
5674                            "source": entry["source"],
5675                            "url": entry["url"],
5676                        }
5677                        for entry in dev["news"]
5678                    ],
5679                }
5680            )
5681
5682        developments = sorted(developments, key=lambda d: d["date"], reverse=True)
5683
5684        output = {
5685            "theme_name": translate(result["themeName"]),
5686            "theme_summary": translate(result["themeDescription"]),
5687            "impacts": impacts,
5688            "stocks": stocks,
5689            "developments": developments,
5690        }
5691        return output
5692
5693    def get_all_theme_metadata(
5694        self, language: Optional[Union[str, Language]] = None
5695    ) -> List[Dict[str, Any]]:
5696        translate = functools.partial(self.translate_text, language)
5697        result = self._get_graphql(
5698            graphql_queries.GET_ALL_THEMES,
5699            variables={"universeIds": None},
5700            error_msg_prefix="Failed to fetch all themes metadata",
5701        )
5702
5703        try:
5704            resp = self._get_graphql(
5705                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5706            )
5707            data = resp["data"]["getMarketTrendsUniverses"]
5708        except Exception:
5709            raise BoostedAPIException("Failed to load market trends universes mapping")
5710        universe_id_to_name = {u["id"]: u["name"] for u in data}
5711
5712        outputs = []
5713        for theme in result["data"]["getAllThemesForUser"]:
5714            # map universe ID to universe ticker
5715            universe_tickers = []
5716            for universe_id in theme["universeIds"]:
5717                if universe_id in universe_id_to_name:  # don't support unlisted universes - skip
5718                    universe_name = universe_id_to_name[universe_id]
5719                    ticker = ThemeUniverse.get_ticker_from_name(universe_name)
5720                    if ticker:
5721                        universe_tickers.append(ticker)
5722
5723            outputs.append(
5724                {
5725                    "theme_id": theme["themeId"],
5726                    "theme_name": translate(theme["themeName"]),
5727                    "universes": universe_tickers,
5728                }
5729            )
5730
5731        return outputs
5732
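A sketch listing theme metadata, assuming a BoostedClient instance `client`.

    for theme in client.get_all_theme_metadata():
        print(theme["theme_id"], theme["theme_name"], theme["universes"])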
5733    def get_earnings_impacting_security(
5734        self,
5735        isin: str,
5736        currency: Optional[str] = None,
5737        country: Optional[str] = None,
5738        language: Optional[Union[str, Language]] = None,
5739    ) -> List[Dict[str, Any]]:
5740        translate = functools.partial(self.translate_text, language)
5741        date = datetime.date.today().strftime("%Y-%m-%d")
5742        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5743            ident_country_currency_dates=[
5744                DateIdentCountryCurrency(
5745                    date=date, identifier=isin, country=country, currency=currency
5746                )
5747            ]
5748        )
5749        try:
5750            gbi_id = company_data[0].gbi_id
5751        except Exception:
5752            raise BoostedAPIException(f"ISIN {isin} not found")
5753
5754        result = self._get_graphql(
5755            graphql_queries.EARNINGS_IMPACTS_CALENDAR_FOR_STOCK,
5756            variables={"date": date, "days": 180, "gbiId": gbi_id},
5757            error_msg_prefix="Failed to fetch earnings impacts data for stock",
5758        )
5759        earnings_events = result["data"]["earningsCalendarForStock"]
5760        output_events = []
5761        for event in earnings_events:
5762            if not event["impactedCompanies"]:
5763                continue
5764            fixed_event = {
5765                "event_date": event["eventDate"],
5766                "company_name": event["security"]["name"],
5767                "symbol": event["security"]["symbol"],
5768                "isin": event["security"]["isin"],
5769                "impact_reason": translate(event["impactedCompanies"][0]["reason"]),
5770            }
5771            output_events.append(fixed_event)
5772
5773        return output_events
5774
5775    def get_earnings_insights_for_stocks(
5776        self, isin: str, currency: Optional[str] = None, country: Optional[str] = None
5777    ) -> Dict[str, Any]:
5778        date = datetime.date.today().strftime("%Y-%m-%d")
5779        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5780            ident_country_currency_dates=[
5781                DateIdentCountryCurrency(
5782                    date=date, identifier=isin, country=country, currency=currency
5783                )
5784            ]
5785        )
5786        gbi_id_isin_map = {
5787            company.gbi_id: company.isin_info.identifier
5788            for company in company_data
5789            if company is not None
5790        }
5791        try:
5792            resp = self._get_graphql(
5793                query=graphql_queries.GET_EARNINGS_INSIGHTS_SUMMARIES,
5794                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5795            )
5796            # list of objects with gbi id and data
5797            summaries = resp["data"]["getEarningsSummaries"]
5798            resp = self._get_graphql(
5799                query=graphql_queries.GET_EARNINGS_COMPARISONS,
5800                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5801            )
5802            # list of objects with gbi id and data
5803            comparison = resp["data"]["getLatestEarningsChanges"]
5804        except Exception:
5805            raise BoostedAPIException("Failed to get earnings insights data")
5806
5807        if not summaries:
5808            raise BoostedAPIException(
5809                (
5810                    f"Failed to find earnings insights data for {isin}"
5811                    ", please try with another security"
5812                )
5813            )
5814
5815        output: Dict[str, Any] = {}
5816        reports = sorted(summaries[0]["reports"], key=lambda r: r["date"], reverse=True)
5817        current_report = reports[0]
5818
5819        def is_aligned_formatter(acc: Tuple[List, List], cur: Dict[str, Any]):
5820            if cur["isAligned"]:
5821                acc[0].append({k: cur[k] for k in cur if k != "isAligned"})
5822            else:
5823                acc[1].append({k: cur[k] for k in cur if k != "isAligned"})
5824            return acc
5825
5826        current_report_common_remarks: Union[List[Dict[str, Any]], List]
5827        current_report_dropped_remarks: Union[List[Dict[str, Any]], List]
5828        current_report_common_remarks, current_report_dropped_remarks = functools.reduce(
5829            is_aligned_formatter, current_report["details"], ([], [])
5830        )
5831        prev_report_common_remarks: Union[List[Dict[str, Any]], List]
5832        prev_report_new_remarks: Union[List[Dict[str, Any]], List]
5833        prev_report_common_remarks, prev_report_new_remarks = functools.reduce(
5834            is_aligned_formatter, reports[1]["details"], ([], [])
5835        )
5836
5837        output["earnings_report"] = {
5838            "release_date": datetime.datetime.strptime(current_report["date"], "%Y-%m-%d").date(),
5839            "quarter": current_report["quarter"],
5840            "year": current_report["year"],
5841            "details": [
5842                {
5843                    "header": detail_obj["header"],
5844                    "detail": detail_obj["detail"],
5845                    "sentiment": detail_obj["sentiment"],
5846                }
5847                for detail_obj in current_report["details"]
5848            ],
5849            "call_summary": current_report["highlights"],
5850            "common_remarks": current_report_common_remarks,
5851            "dropped_remarks": current_report_dropped_remarks,
5852            "qa_summary": current_report["qaHighlights"],
5853            "qa_details": current_report["qaDetails"],
5854        }
5855        prev_report = reports[1]  # second-most-recent report (reports are sorted desc)
5856        output["prior_earnings_report"] = {
5857            "release_date": datetime.datetime.strptime(prev_report["date"], "%Y-%m-%d").date(),
5858            "quarter": prev_report["quarter"],
5859            "year": prev_report["year"],
5860            "details": [
5861                {
5862                    "header": detail_obj["header"],
5863                    "detail": detail_obj["detail"],
5864                    "sentiment": detail_obj["sentiment"],
5865                }
5866                for detail_obj in prev_report["details"]
5867            ],
5868            "call_summary": prev_report["highlights"],
5869            "common_remarks": prev_report_common_remarks,
5870            "new_remarks": prev_report_new_remarks,
5871            "qa_summary": prev_report["qaHighlights"],
5872            "qa_details": prev_report["qaDetails"],
5873        }
5874
5875        if not comparison:
5876            output["report_comparison"] = []
5877        else:
5878            output["report_comparison"] = comparison[0]["changes"]
5879
5880        return output
5881
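A sketch, assuming a BoostedClient instance `client`; the result bundles the latest report, the prior report, and a change comparison.

    insights = client.get_earnings_insights_for_stocks(isin="US0378331005")
    report = insights["earnings_report"]
    print(report["release_date"], report["call_summary"])
    print(len(insights["report_comparison"]), "compared items")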
5882    def get_portfolio_inference_status(self, portfolio_id: str, inference_date: str) -> dict:
5883        url = f"{self.base_uri}/api/inference/status/{portfolio_id}/{inference_date}"
5884        headers = {"Authorization": "ApiKey " + self.api_key}
5885        res = requests.get(url, headers=headers, **self._request_params)
5886
5887        if not res.ok:
5888            error_msg = self._try_extract_error_code(res)
5889            logger.error(error_msg)
5890            raise BoostedAPIException(
5891                f"Failed to get portfolio inference status, portfolio_id={portfolio_id}, "
5892                f"inference_date={inference_date}: {error_msg}"
5893            )
5894
5895        data = res.json()
5896        return data
5897
5898    def delete_portfolios(self, model_to_portfolios: Dict[str, List[str]]) -> None:
5899        """
5900        Deletes a list of portfolios
5901
5902        Args:
5903            model_to_portfolios: Mapping from model_id -> list of corresponding portfolios to delete
5904        """
5905        for model_id, portfolios in model_to_portfolios.items():
5906            for portfolio_id in portfolios:
5907                url = self.base_uri + "/api/models/{0}/constraints/{1}/delete".format(
5908                    model_id, portfolio_id
5909                )
5910                headers = {"Authorization": "ApiKey " + self.api_key}
5911                res = requests.put(url, headers=headers, **self._request_params)
5912                if not res.ok:
5913                    error_msg = self._try_extract_error_code(res)
5914                    logger.error(error_msg)
5915                    raise BoostedAPIException("Failed to delete portfolios: {0}.".format(error_msg))
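A sketch, assuming a BoostedClient instance `client`; deletion is permanent, so double-check the placeholder IDs before running.

    client.delete_portfolios({"<model-id>": ["<portfolio-id-1>", "<portfolio-id-2>"]})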
logger = <Logger boosted.api.client (WARNING)>
g_boosted_api_url = 'https://insights.boosted.ai'
g_boosted_api_url_dev = 'https://insights-dev.boosted.ai'
WATCHLIST_ROUTE_PREFIX = '/api/dal/watchlist'
ROUTE_PREFIX = '/api/dal/watchlist'
DAL_WATCHLIST_ROUTE = '/api/v0/watchlist'
DAL_SECURITIES_ROUTE = '/api/v0/securities'
DAL_PA_ROUTE = '/api/v0/portfolio-analysis'
PORTFOLIO_GROUP_ROUTE = '/api/v0/portfolio-group'
RISK_FACTOR = 'risk-factor'
RISK_FACTOR_V2 = 'risk-factor-v2'
RISK_FACTOR_COLUMNS = ['depth', 'identifier', 'stock_count', 'volatility', 'exposure', 'rating', 'rating_delta']
class BoostedClient:
  85class BoostedClient:
  86    def __init__(
  87        self, api_key, override_uri=None, debug=False, proxy=None, disable_verify_ssl=False
  88    ):
  89        """
  90        Parameters
  91        ----------
  92        api_key: str
  93            Your API key provided by the Boosted application.  See your profile
  94            to generate a new key.
  95        proxy: str
  96            Your organization may require the use of a proxy for access.
  97            The address of a HTTPS proxy in the format of <address>:<port>.
  98            Examples are "123.456.789:123" or "my.proxy.com:123".
  99            Do not prepend with "https://".
 100        disable_verify_ssl: bool
 101            Your networking setup may be behind a firewall which performs SSL
 102            inspection. Either set the REQUESTS_CA_BUNDLE environment variable
 103            to point to the location of a custom certificate bundle, or set this
 104            parameter to True to disable SSL verification as a workaround.
 105        """
 106        if override_uri is None:
 107            self.base_uri = g_boosted_api_url
 108        else:
 109            self.base_uri = override_uri
 110        self.api_key = api_key
 111        self.debug = debug
 112        self._request_params: Dict = {}
 113        if debug:
 114            logger.setLevel(logging.DEBUG)
 115        else:
 116            logger.setLevel(logging.INFO)
 117        if proxy is not None:
 118            self._request_params["proxies"] = {"https": proxy}
 119        if disable_verify_ssl:
 120            self._request_params["verify"] = False
 121
 122    def __print_json_info(self, json_data, isInference=False):
 123        if "warnings" in json_data.keys():
 124            for warning in json_data["warnings"]:
 125                logger.warning("  {0}".format(warning))
 126        if "errors" in json_data.keys():
 127            for error in json_data["errors"]:
 128                logger.error("  {0}".format(error))
 129            return Status.FAIL
 130
 131        if "result" in json_data.keys():
 132            results_data = json_data["result"]
 133            if isInference:
 134                if "inferenceResultsUrl" in results_data.keys():
 135                    res_url = parse.urlparse(results_data["inferenceResultsUrl"])
 136                    logger.debug(res_url)
 137                    logger.info("Inference started.")
 138            if "updateCount" in results_data.keys():
 139                logger.info("Updated {0} rows.".format(results_data["updateCount"]))
 140            if "createCount" in results_data.keys():
 141                logger.info("Created {0} rows.".format(results_data["createCount"]))
 142            return Status.SUCCESS
 143
 144    def __to_date_obj(self, dt):
 145        if isinstance(dt, datetime.datetime):
 146            dt = dt.date()
 147        elif isinstance(dt, datetime.date):
 148            return dt
 149        elif isinstance(dt, str):
 150            try:
 151                dt = parser.parse(dt).date()
 152            except ValueError:
 153                raise ValueError('dt: "' + dt + '" is not a valid date.')
 154        return dt
 155
 156    def __iso_format(self, dt):
 157        date = self.__to_date_obj(dt)
 158        if date is not None:
 159            date = date.isoformat()
 160        return date
 161
 162    def _check_status_code(self, response, isInference=False):
 163        has_json = False
 164        try:
 165            logger.debug(response.headers)
 166            if "Content-Type" in response.headers:
 167                if response.headers["Content-Type"].startswith("application/json"):
 168                    json_data = response.json()
 169                    has_json = True
 170            else:
 171                has_json = False
 172        except json.JSONDecodeError:
 173            logger.error("ERROR: response has no JSON payload.")
 174        if response.status_code == 200 or response.status_code == 202:
 175            if has_json:
 176                self.__print_json_info(json_data, isInference)
 177            else:
 178                pass
 179            return Status.SUCCESS
 180        if response.status_code == 404:
 181            if has_json:
 182                self.__print_json_info(json_data, isInference)
 183            raise BoostedAPIException(
 184                'Server "{0}" not reachable.  Code {1}.'.format(
 185                    self.base_uri, response.status_code
 186                ),
 187                data=response,
 188            )
 189        if response.status_code == 400:
 190            if has_json:
 191                self.__print_json_info(json_data, isInference)
 192            if isInference:
 193                return Status.FAIL
 194            else:
 195                raise BoostedAPIException("Error, bad request.  Check the dataset ID.", response)
 196        if response.status_code == 401:
 197            if has_json:
 198                self.__print_json_info(json_data, isInference)
 199            raise BoostedAPIException("Authorization error.", response)
 200        else:
 201            if has_json:
 202                self.__print_json_info(json_data, isInference)
 203            raise BoostedAPIException(
 204                "Error in API response.  Status code={0} {1}\n{2}".format(
 205                    response.status_code, response.reason, response.headers
 206                ),
 207                response,
 208            )
 209
 210    def _try_extract_error_code(self, result):
 211        logger.info(result.headers)
 212        if "Content-Type" in result.headers:
 213            if result.headers["Content-Type"].startswith("application/json"):
 214                if "errors" in result.json():
 215                    return result.json()["errors"]
 216            if result.headers["Content-Type"].startswith("text/plain"):
 217                return result.text
 218        return str(result.reason)
 219
 220    def _check_ok_or_err_with_msg(self, res, potential_error_msg: str):
 221        if not res.ok:
 222            error = self._try_extract_error_code(res)
 223            logger.error(error)
 224            raise BoostedAPIException(f"{potential_error_msg}: {error}")
 225
 226    def _get_portfolio_rebalance_from_periods(
 227        self, portfolio_id: str, rel_periods: List[str]
 228    ) -> List[datetime.date]:
 229        """
 230        Returns a list of rebalance dates for a portfolio given a list of
 231        relative periods of format '1D', '1W', '3M', etc.
 232        """
 233        resp = self._get_graphql(
 234            query=graphql_queries.GET_PORTFOLIO_RELATIVE_DATES_QUERY,
 235            variables={"portfolioId": portfolio_id, "relativePeriods": rel_periods},
 236        )
 237        dates = resp["data"]["portfolio"]["relativeDates"]
 238        return [datetime.datetime.strptime(d["date"], "%Y-%m-%d").date() for d in dates]
 239
 240    def translate_text(self, language: Optional[Union[Language, str]], text: str) -> str:
 241        if not language or language == Language.ENGLISH:
 242            # By default, do not translate English
 243            return text
 244
 245        params = {"text": text, "langCode": language}
 246        url = self.base_uri + "/api/translate/translate-text"
 247        headers = {"Authorization": "ApiKey " + self.api_key}
 248        logger.info("Translating text...")
 249        res = requests.post(url, json=params, headers=headers, **self._request_params)
 250        try:
 251            result = res.json()["translatedText"]
 252        except Exception:
 253            raise BoostedAPIException("Error translating text")
 254        return result
 255
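A sketch, assuming a BoostedClient instance `client` and that the service accepts either a Language member or a plain language-code string (the code below is a placeholder).

    translated = client.translate_text("ja", "Strong momentum in the last quarter.")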
 256    def query_dataset(self, dataset_id):
 257        url = self.base_uri + "/api/datasets/{0}".format(dataset_id)
 258        headers = {"Authorization": "ApiKey " + self.api_key}
 259        res = requests.get(url, headers=headers, **self._request_params)
 260        if res.ok:
 261            return res.json()
 262        else:
 263            error_msg = self._try_extract_error_code(res)
 264            logger.error(error_msg)
 265            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
 266
 267    def query_namespace_dataset_id(self, namespace, data_type):
 268        url = self.base_uri + f"/api/custom-security-dataset/{namespace}/{data_type}"
 269        headers = {"Authorization": "ApiKey " + self.api_key}
 270        res = requests.get(url, headers=headers, **self._request_params)
 271        if res.ok:
 272            return res.json()["result"]["id"]
 273        else:
 274            if res.status_code != 404:
 275                error_msg = self._try_extract_error_code(res)
 276                logger.error(error_msg)
 277                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
 278            else:
 279                return None
 280
 281    def export_global_data(
 282        self,
 283        dataset_id,
 284        start=(datetime.date.today() - timedelta(days=365 * 25)),
 285        end=datetime.date.today(),
 286        timeout=600,
 287    ):
 288        query_info = self.query_dataset(dataset_id)
 289        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
 290            raise BoostedAPIException(
 291                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
 292            )
 293        return self.export_data(dataset_id, start, end, timeout)
 294
 295    def export_independent_data(
 296        self,
 297        dataset_id,
 298        start=(datetime.date.today() - timedelta(days=365 * 25)),
 299        end=datetime.date.today(),
 300        timeout=600,
 301    ):
 302        query_info = self.query_dataset(dataset_id)
 303        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
 304            raise BoostedAPIException(
 305                f"Incorrect dataset type: {query_info['type']}"
 306                f" - Expected {DataSetType.STRATEGY}"
 307            )
 308        return self.export_data(dataset_id, start, end, timeout)
 309
 310    def export_dependent_data(
 311        self,
 312        dataset_id,
 313        start=None,
 314        end=None,
 315        timeout=600,
 316    ):
 317        query_info = self.query_dataset(dataset_id)
 318        if DataSetType[query_info["type"]] != DataSetType.STOCK:
 319            raise BoostedAPIException(
 320                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
 321            )
 322
 323        valid_date_range = self.getDatasetDates(dataset_id)
 324        validStart = valid_date_range["validFrom"]
 325        validEnd = valid_date_range["validTo"]
 326
 327        if start is None:
 328            logger.info("No start date provided; starting from {0}.".format(validStart))
 329            start = validStart
 330        if end is None:
 331            logger.info("No end date provided; ending at {0}.".format(validEnd))
 332            end = validEnd
 333        start = self.__to_date_obj(start)
 334        end = self.__to_date_obj(end)
 335        if start < validStart:
 336            logger.info("Data does not exist before {0}.".format(validStart))
 337            logger.info("Starting from {0}.".format(validStart))
 338            start = validStart
 339        if end > validEnd:
 340            logger.info("Data does not exist after {0}.".format(validEnd))
 341            logger.info("Ending at {0}.".format(validEnd))
 342            end = validEnd
 343        validate_start_and_end_dates(start, end)
 344
 345        logger.info("Data exists from {0} to {1}.".format(start, end))
 346        request_url = "/api/datasets/" + dataset_id + "/export-data"
 347        headers = {"Authorization": "ApiKey " + self.api_key}
 348
 349        data_chunks = []
 350        chunk_size_days = 90
 351        while start <= end:
 352            chunk_end = start + timedelta(days=chunk_size_days)
 353            if chunk_end > end:
 354                chunk_end = end
 355
 356            logger.info("Requesting start={0} end={1}.".format(start, chunk_end))
 357            params = {"start": self.__iso_format(start), "end": self.__iso_format(chunk_end)}
 358            logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))
 359
 360            res = requests.get(
 361                self.base_uri + request_url,
 362                headers=headers,
 363                params=params,
 364                timeout=timeout,
 365                **self._request_params,
 366            )
 367
 368            if res.ok:
 369                buf = io.StringIO(res.text)
 370                df = pd.read_csv(buf, index_col=0, parse_dates=True)
 371                if "price" in df.columns:
 372                    df = df.drop("price", axis=1)
 373                data_chunks.append(df)
 374            else:
 375                error_msg = self._try_extract_error_code(res)
 376                logger.error(error_msg)
 377                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
 378
 379            start = start + timedelta(days=chunk_size_days + 1)
 380
 381        return pd.concat(data_chunks)
 382
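A sketch of a chunked dependent-data export, assuming a BoostedClient instance `client`; as the code above shows, out-of-range dates are clamped to the dataset's valid range.

    df = client.export_dependent_data("<dataset-id>", start="2020-01-01", end="2020-12-31")
    print(df.shape)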
 383    def export_custom_security_data(
 384        self,
 385        dataset_id,
 386        start=(date.today() - timedelta(days=365 * 25)),
 387        end=date.today(),
 388        timeout=600,
 389    ):
 390        query_info = self.query_dataset(dataset_id)
 391        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
 392            raise BoostedAPIException(
 393                f"Incorrect dataset type: {query_info['type']}"
 394                f" - Expected {DataSetType.SECURITIES_DAILY}"
 395            )
 396        return self.export_data(dataset_id, start, end, timeout)
 397
 398    def export_data(
 399        self,
 400        dataset_id,
 401        start=(datetime.date.today() - timedelta(days=365 * 25)),
 402        end=datetime.date.today(),
 403        timeout=600,
 404    ):
 405        logger.info("Requesting start={0} end={1}.".format(start, end))
 406        request_url = "/api/datasets/" + dataset_id + "/export-data"
 407        headers = {"Authorization": "ApiKey " + self.api_key}
 408        start = self.__iso_format(start)
 409        end = self.__iso_format(end)
 410        params = {"start": start, "end": end}
 411        logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))
 412        res = requests.get(
 413            self.base_uri + request_url,
 414            headers=headers,
 415            params=params,
 416            timeout=timeout,
 417            **self._request_params,
 418        )
 419        if res.ok or self._check_status_code(res):
 420            buf = io.StringIO(res.text)
 421            df = pd.read_csv(buf, index_col=0, parse_dates=True)
 422            if "price" in df.columns:
 423                df = df.drop("price", axis=1)
 424            return df
 425        else:
 426            error_msg = self._try_extract_error_code(res)
 427            logger.error(error_msg)
 428            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
 429
 430    def _get_inference(self, model_id, inference_date=datetime.date.today()):
 431        request_url = "/api/models/" + model_id + "/inference-results"
 432        headers = {"Authorization": "ApiKey " + self.api_key}
 433        params = {}
 434        params["date"] = self.__iso_format(inference_date)
 435        logger.debug(request_url + ", " + str(headers) + ", " + str(params))
 436        res = requests.get(
 437            self.base_uri + request_url, headers=headers, params=params, **self._request_params
 438        )
 439        status = self._check_status_code(res, isInference=True)
 440        if status == Status.SUCCESS:
 441            return res, status
 442        else:
 443            return None, status
 444
 445    def get_inference(
 446        self, model_id, inference_date=datetime.date.today(), block=False, timeout_minutes=30
 447    ):
 448        start_time = datetime.datetime.now()
 449        while True:
 450            for numRetries in range(3):
 451                res, status = self._get_inference(model_id, inference_date)
 452                if res is not None:
 453                    break  # got a response; stop retrying
 454                else:
 455                    if status == Status.FAIL:
 456                        return Status.FAIL
 457                    logger.info("Retrying...")
 458            if res is None:
 459                logger.error("Max retries reached.  Request failed.")
 460                return None
 461
 462            json_data = res.json()
 463            if "result" in json_data.keys():
 464                if json_data["result"]["status"] == "RUNNING":
 465                    still_running = True
 466                    if not block:
 467                        logger.warning("Inference job is still running.")
 468                        return None
 469                    else:
 470                        logger.info(
 471                            "Inference job is still running.  Time elapsed={0}.".format(
 472                                datetime.datetime.now() - start_time
 473                            )
 474                        )
 475                        time.sleep(10)
 476                else:
 477                    still_running = False
 478
 479                if not still_running and json_data["result"]["status"] == "COMPLETE":
 480                    signals_csv = json_data["result"]["signals"]  # avoid shadowing the csv module
 481                    logger.info(json_data["result"])
 482                    if self._check_status_code(res, isInference=True):
 483                        logger.info(
 484                            "Total run time = {0}.".format(datetime.datetime.now() - start_time)
 485                        )
 486                        return signals_csv
 487            else:
 488                if "errors" in json_data.keys():
 489                    logger.error(json_data["errors"])
 490                else:
 491                    logger.error("Error getting inference for date {0}.".format(inference_date))
 492                return None
 493            if (datetime.datetime.now() - start_time).total_seconds() / 60.0 > timeout_minutes:
 494                logger.error("Timeout waiting for job completion.")
 495                return None
 496
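A sketch of a blocking inference fetch, assuming a BoostedClient instance `client`; on completion the signals come back as CSV text, while failures surface as None or Status.FAIL.

    import datetime

    signals_csv = client.get_inference(
        "<model-id>", inference_date=datetime.date.today(), block=True
    )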
 497    def createDataset(self, schema):
 498        request_url = "/api/datasets"
 499        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
 500        s = json.dumps(schema)
 501        logger.info("Creating dataset with schema " + s)
 502        res = requests.post(
 503            self.base_uri + request_url, data=s, headers=headers, **self._request_params
 504        )
 505        if res.ok:
 506            return res.json()["result"]
 507        else:
 508            raise BoostedAPIException("Dataset creation failed.")
 509
 510    def create_custom_namespace_dataset(self, namespace, schema):
 511        request_url = f"/api/custom-security-dataset/{namespace}"
 512        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
 513        s = json.dumps(schema)
 514        logger.info("Creating dataset with schema " + s)
 515        res = requests.post(
 516            self.base_uri + request_url, data=s, headers=headers, **self._request_params
 517        )
 518        if res.ok:
 519            return res.json()["result"]
 520        else:
 521            raise BoostedAPIException("Dataset creation failed.")
 522
 523    def getUniverse(self, modelId, date=None):
 524        if date is not None:
 525            url = "/api/models/{0}/universe/{1}".format(modelId, self.__iso_format(date))
 526            logger.info("Getting universe for date: {0}.".format(date))
 527        else:
 528            url = "/api/models/{0}/universe/".format(modelId)
 529        headers = {"Authorization": "ApiKey " + self.api_key}
 530        res = requests.get(self.base_uri + url, headers=headers, **self._request_params)
 531        if res.ok:
 532            buf = io.StringIO(res.text)
 533            df = pd.read_csv(buf, index_col=0, parse_dates=True)
 534            return df
 535        else:
 536            error = self._try_extract_error_code(res)
 537            logger.error(
 538                "There was a problem getting this universe or model ID: {0}.".format(error)
 539            )
 540            raise BoostedAPIException("Failed to get universe: {0}".format(error))
 541
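A sketch, assuming a BoostedClient instance `client`; the universe comes back as a pandas DataFrame parsed from CSV.

    uni = client.getUniverse("<model-id>", date="2024-01-03")
    print(uni.head())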
 542    def add_custom_security_namespace_members(
 543        self, namespace, members: Union[pandas.DataFrame, str]
 544    ) -> Tuple[pandas.DataFrame, str]:
 545        url = self.base_uri + "/api/synthetic-datasets/{0}/generate".format(namespace)
 546        headers = {"Authorization": "ApiKey " + self.api_key}
 547        headers["Content-Type"] = "application/json"
 548        logger.info("Adding custom security namespace for namespace: {0}".format(namespace))
 549        strbuf = None
 550        if isinstance(members, pandas.DataFrame):
 551            df = members
 552            df_canon = df.rename(columns={_: to_camel_case(_) for _ in df.columns})
 553            canon_cols = ["Currency", "Symbol", "Country", "Name"]
 554            if set(canon_cols).difference(df_canon.columns):
 555                raise BoostedAPIException(f"Expected columns: {canon_cols}")
 556            df_canon = df_canon.loc[:, canon_cols]
 557            buf = io.StringIO()
 558            df_canon.to_json(buf, orient="records")
 559            strbuf = buf.getvalue()
 560        elif isinstance(members, str):
 561            strbuf = members
 562        else:
 563            raise BoostedAPIException(f"Unsupported members argument type: {type(members)}")
 564        res = requests.post(url, data=strbuf, headers=headers, **self._request_params)
 565        if res.ok:
 566            res_obj = res.json()
 567            res_df = pandas.Series(res_obj["generatedISIN"]).to_frame()
 568            res_df.index.name = "Symbol"
 569            res_df.columns = ["ISIN"]
 570            logger.info("Add to custom security namespace successful.")
 571            if "warnings" in res_obj:
 572                logger.info("Warnings: {0}.".format(res_obj["warnings"]))
 573                return res_df, res_obj["warnings"]
 574            else:
 575                return res_df, "No warnings."
 576        else:
 577            error_msg = self._try_extract_error_code(res)
 578            raise BoostedAPIException("Failed to get universe: {0}.".format(error_msg))
 579
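        # Usage sketch (illustrative; namespace and row values are placeholders).
        # The frame must provide the Currency, Symbol, Country and Name columns:
        #
        #     members = pd.DataFrame(
        #         [{"Symbol": "ACME", "Name": "Acme Corp", "Country": "USA", "Currency": "USD"}]
        #     )
        #     isin_df, warnings = client.add_custom_security_namespace_members(
        #         "my-namespace", members
        #     )
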
 580    def updateUniverse(self, modelId, universe_df, date=None):
            # Default to tomorrow at call time; a default-argument expression
            # would be evaluated only once, at import time.
            if date is None:
                date = datetime.date.today() + timedelta(1)
 581        date = self.__iso_format(date)
 582        url = self.base_uri + "/api/models/{0}/universe/{1}".format(modelId, date)
 583        headers = {"Authorization": "ApiKey " + self.api_key}
 584        logger.info("Updating universe for date {0}.".format(date))
 585        if isinstance(universe_df, pd.core.frame.DataFrame):
 586            buf = io.StringIO()
 587            universe_df.to_csv(buf)
 588            target = ("uploaded_universe.csv", buf.getvalue(), "text/csv")
 589            files_req = {}
 590            files_req["universe"] = target
 591            res = requests.post(url, files=files_req, headers=headers, **self._request_params)
 592        elif isinstance(universe_df, str):
 593            target = ("uploaded_universe.csv", universe_df, "text/csv")
 594            files_req = {}
 595            files_req["universe"] = target
 596            res = requests.post(url, files=files_req, headers=headers, **self._request_params)
 597        else:
 598            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
 599        if res.ok:
 600            logger.info("Universe update successful.")
 601            if "warnings" in res.json():
 602                logger.info("Warnings: {0}.".format(res.json()["warnings"]))
 603                return res.json()["warnings"]
 604            else:
 605                return "No warnings."
 606        else:
 607            error_msg = self._try_extract_error_code(res)
 608            raise BoostedAPIException("Failed to get universe: {0}.".format(error_msg))
 609
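        # Usage sketch (illustrative; the model ID is a placeholder).  The universe
        # may be a DataFrame or a raw CSV string:
        #
        #     warnings = client.updateUniverse("my-model-id", universe_df, date="2023-01-02")
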
 610    def create_universe(
 611        self, universe: Union[pd.DataFrame, str, bytes, os.PathLike], name: str, description: str
 612    ) -> List[str]:
 613        PRESENT = "PRESENT"
 614        ANY = "ANY"
 615        EARLIEST_DATE = "1900-01-01"
 616        LATEST_DATE = "4000-01-01"
 617
 618        if isinstance(universe, (str, bytes, os.PathLike)):
 619            universe = pd.read_csv(universe)
 620
 621        universe.columns = universe.columns.str.lower()
 622
 623        # Clients are free to leave out data. Fill in some defaults here.
 624        if "from" not in universe.columns:
 625            universe["from"] = EARLIST_DATE
 626        if "to" not in universe.columns:
 627            universe["to"] = LATEST_DATE
 628        if "currency" not in universe.columns:
 629            universe["currency"] = ANY
 630        if "country" not in universe.columns:
 631            universe["country"] = ANY
 632        if "isin" not in universe.columns:
 633            universe["isin"] = None
 634        if "symbol" not in universe.columns:
 635            universe["symbol"] = None
 636
 637        # to prevent conflicts with python keywords
 638        universe.rename(columns={"from": "from_date", "to": "to_date"}, inplace=True)
 639
 640        universe = universe.replace({np.nan: None})
 641        security_country_currency_date_list = []
 642        for i, r in enumerate(universe.itertuples()):
 643            id_type = ColumnSubRole.ISIN
 644            identifier = r.isin
 645
 646            if identifier is None:
 647                id_type = ColumnSubRole.SYMBOL
 648                identifier = str(r.symbol) if r.symbol is not None else None
 649
 650            # if identifier is still None, the row has neither an ISIN nor a
 651            # symbol, in which case we throw an error
 652            if identifier is None:
 653                raise BoostedAPIException(
 654                    f"Missing identifier in universe row {i + 1}:"
 655                    " the row should contain an ISIN or Symbol"
 656                )
 659
 660            security_country_currency_date_list.append(
 661                DateIdentCountryCurrency(
 662                    date=r.from_date or EARLIEST_DATE,
 663                    identifier=identifier,
 664                    country=r.country or ANY,
 665                    currency=r.currency or ANY,
 666                    id_type=id_type,
 667                )
 668            )
 669
 670        gbi_id_objs = self.getGbiIdFromIdentCountryCurrencyDate(security_country_currency_date_list)
 671
 672        security_list = []
 673        for i, r in enumerate(universe.itertuples()):
 674            # if we have a None here, we failed to map to a gbi id
 675            if gbi_id_objs[i] is None:
 676                raise BoostedAPIException(f"Unable to map row: {tuple(r)}")
 677
 678            security_list.append(
 679                {
 680                    "stockId": gbi_id_objs[i].gbi_id,
 681                    "fromZ": r.from_date or EARLIST_DATE,
 682                    "toZ": LATEST_DATE if r.to_date in (PRESENT, None) else r.to_date,
 683                    "removal": False,
 684                    "source": "UPLOAD",
 685                }
 686            )
 687
 688        url = self.base_uri + "/api/template-universe/save"
 689        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
 690        req = {"name": name, "description": description, "modificationDaos": security_list}
 691
 692        res = requests.post(url, json=req, headers=headers, **self._request_params)
 693        self._check_ok_or_err_with_msg(res, "Failed to create universe")
 694
 695        if "warnings" in res.json():
 696            logger.info("Warnings: {0}.".format(res.json()["warnings"]))
 697            return res.json()["warnings"].splitlines()
 698        else:
 699            return []
 700
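        # Usage sketch (illustrative; values are placeholders).  Omitted columns such
        # as "from", "to", "country" and "currency" receive the defaults above:
        #
        #     universe = pd.DataFrame(
        #         [{"isin": "US0378331005", "from": "2020-01-01", "to": "PRESENT"}]
        #     )
        #     warnings = client.create_universe(universe, "My Universe", "An example universe")
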
 701    def validate_dataframe(self, df):
 702        if not isinstance(df, pd.core.frame.DataFrame):
 703            logger.error("Dataset must be of type Dataframe.")
 704            return False
 705        if not isinstance(df.index, pd.DatetimeIndex):
 706            logger.error("Index must be DatetimeIndex.")
 707            return False
 708        if len(df.columns) == 0:
 709            logger.error("No feature columns exist.")
 710            return False
 711        if len(df) == 0:
 712            logger.error("No rows exist.")
                return False
 713        return True
 714
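        # Sketch of a frame that passes validate_dataframe (illustrative): a
        # DatetimeIndex plus at least one feature column and one row.
        #
        #     df = pd.DataFrame(
        #         {"my_feature": [1.0, 2.0]},
        #         index=pd.to_datetime(["2023-01-02", "2023-01-03"]),
        #     )
        #     assert client.validate_dataframe(df)
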
 715    def get_dataset_schema(self, dataset_id):
 716        url = self.base_uri + "/api/datasets/{0}/schema".format(dataset_id)
 717        headers = {"Authorization": "ApiKey " + self.api_key}
 718        res = requests.get(url, headers=headers, **self._request_params)
 719        if res.ok:
 720            json_schema = res.json()
 721        else:
 722            error_msg = self._try_extract_error_code(res)
 723            logger.error(error_msg)
 724            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
 725        return DataSetConfig.fromDict(json_schema["result"])
 726
 727    def add_custom_security_daily_dataset(
 728        self, namespace, dataset, schema=None, timeout=600, block=True
 729    ):
 730        result = self.add_custom_security_daily_dataset_with_warnings(
 731            namespace, dataset, schema, timeout, block
 732        )
 733        return result["dataset_id"]
 734
 735    def add_custom_security_daily_dataset_with_warnings(
 736        self,
 737        namespace,
 738        dataset,
 739        schema=None,
 740        timeout=600,
 741        block=True,
 742        no_exception_on_chunk_error=False,
 743    ):
 744        dataset_type = DataSetType.SECURITIES_DAILY
 745        dsid = self.query_namespace_dataset_id(namespace, dataset_type)
 746
 747        if not self.validate_dataframe(dataset):
 748            logger.error("dataset failed validation.")
 749            return None
 750
 751        if dsid is None:
 752            # create the dataset if it does not exist
 753            schema = infer_dataset_schema(
 754                "custom_security_daily", dataset, dataset_type, infer_from_column_names=True
 755            )
 756            dsid = self.create_custom_namespace_dataset(namespace, schema.toDict())
 757            data_type = DataAddType.CREATION
 758        elif schema is not None:
 759            raise ValueError(
 760                f"Dataset schema already exists for namespace={namespace}, type={dataset_type}",
 761                ", cannot create another!",
 762            )
 763        else:
 764            data_type = DataAddType.HISTORICAL
 765
 766        logger.info("Created dataset with ID = {0}, uploading...".format(dsid))
 767        result = self.add_custom_security_daily_data(
 768            dsid,
 769            dataset,
 770            timeout,
 771            block,
 772            data_type=data_type,
 773            no_exception_on_chunk_error=no_exception_on_chunk_error,
 774        )
 775        return {
 776            "namespace": namespace,
 777            "dataset_id": dsid,
 778            "warnings": result["warnings"],
 779            "errors": result["errors"],
 780        }
 781
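        # Usage sketch (illustrative; the namespace is a placeholder).  The first
        # call creates the dataset from an inferred schema; later calls append data:
        #
        #     result = client.add_custom_security_daily_dataset_with_warnings("my-namespace", df)
        #     print(result["dataset_id"], result["warnings"])
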
 782    def add_custom_security_daily_data(
 783        self,
 784        dataset_id,
 785        csv_data,
 786        timeout=600,
 787        block=True,
 788        data_type=DataAddType.HISTORICAL,
 789        no_exception_on_chunk_error=False,
 790    ):
 791        warnings = []
 792        query_info = self.query_dataset(dataset_id)
 793        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
 794            raise BoostedAPIException(
 795                f"Incorrect dataset type: {query_info['type']}"
 796                f" - Expected {DataSetType.SECURITIES_DAILY}"
 797            )
 798        warnings, errors = self.setup_chunk_and_upload_data(
 799            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
 800        )
 801        if len(warnings) > 0:
 802            logger.warning(
 803                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
 804            )
 805        if len(errors) > 0:
 806            raise BoostedAPIException(
 807                "Encountered {0} total ERRORS while uploading dataset".format(len(errors))
 808                + "\n".join(errors)
 809            )
 810        return {"warnings": warnings, "errors": errors}
 811
 812    def add_dependent_dataset(
 813        self, dataset, datasetName="DependentDataset", schema=None, timeout=600, block=True
 814    ):
 815        result = self.add_dependent_dataset_with_warnings(
 816            dataset, datasetName, schema, timeout, block
 817        )
 818        return result["dataset_id"]
 819
 820    def add_dependent_dataset_with_warnings(
 821        self,
 822        dataset,
 823        datasetName="DependentDataset",
 824        schema=None,
 825        timeout=600,
 826        block=True,
 827        no_exception_on_chunk_error=False,
 828    ):
 829        if not self.validate_dataframe(dataset):
 830            logger.error("dataset failed validation.")
 831            return None
 832        if schema is None:
 833            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STOCK)
 834        dsid = self.createDataset(schema.toDict())
 835        logger.info("Creating dataset with ID = {0}.".format(dsid))
 836        result = self.add_dependent_data(
 837            dsid,
 838            dataset,
 839            timeout,
 840            block,
 841            data_type=DataAddType.CREATION,
 842            no_exception_on_chunk_error=no_exception_on_chunk_error,
 843        )
 844        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
 845
 846    def add_independent_dataset(
 847        self, dataset, datasetName="IndependentDataset", schema=None, timeout=600, block=True
 848    ):
 849        result = self.add_independent_dataset_with_warnings(
 850            dataset, datasetName, schema, timeout, block
 851        )
 852        return result["dataset_id"]
 853
 854    def add_independent_dataset_with_warnings(
 855        self,
 856        dataset,
 857        datasetName="IndependentDataset",
 858        schema=None,
 859        timeout=600,
 860        block=True,
 861        no_exception_on_chunk_error=False,
 862    ):
 863        if not self.validate_dataframe(dataset):
 864            logger.error("dataset failed validation.")
 865            return None
 866        if schema is None:
 867            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STRATEGY)
 868        schemaDict = schema.toDict()
 869        if "configurationDataJson" not in schemaDict:
 870            schemaDict["configurationDataJson"] = "{}"
 871        dsid = self.createDataset(schemaDict)
 872        logger.info("Creating dataset with ID = {0}.".format(dsid))
 873        result = self.add_independent_data(
 874            dsid,
 875            dataset,
 876            timeout,
 877            block,
 878            data_type=DataAddType.CREATION,
 879            no_exception_on_chunk_error=no_exception_on_chunk_error,
 880        )
 881        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
 882
 883    def add_global_dataset(
 884        self, dataset, datasetName="GlobalDataset", schema=None, timeout=600, block=True
 885    ):
 886        result = self.add_global_dataset_with_warnings(dataset, datasetName, schema, timeout, block)
 887        return result["dataset_id"]
 888
 889    def add_global_dataset_with_warnings(
 890        self,
 891        dataset,
 892        datasetName="GlobalDataset",
 893        schema=None,
 894        timeout=600,
 895        block=True,
 896        no_exception_on_chunk_error=False,
 897    ):
 898        if not self.validate_dataframe(dataset):
 899            logger.error("dataset failed validation.")
 900            return None
 901        if schema is None:
 902            schema = infer_dataset_schema(datasetName, dataset, DataSetType.GLOBAL)
 903        dsid = self.createDataset(schema.toDict())
 904        logger.info("Creating dataset with ID = {0}.".format(dsid))
 905        result = self.add_global_data(
 906            dsid,
 907            dataset,
 908            timeout,
 909            block,
 910            data_type=DataAddType.CREATION,
 911            no_exception_on_chunk_error=no_exception_on_chunk_error,
 912        )
 913        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
 914
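        # Usage sketch covering the three dataset flavours above (illustrative; df
        # must pass validate_dataframe).  A schema is inferred when none is given:
        #
        #     stock_ds_id = client.add_dependent_dataset(df, "MyStockDataset")
        #     strategy_ds_id = client.add_independent_dataset(df, "MyStrategyDataset")
        #     global_ds_id = client.add_global_dataset(df, "MyGlobalDataset")
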
 915    def add_independent_data(
 916        self,
 917        dataset_id,
 918        csv_data,
 919        timeout=600,
 920        block=True,
 921        data_type=DataAddType.HISTORICAL,
 922        no_exception_on_chunk_error=False,
 923    ):
 924        query_info = self.query_dataset(dataset_id)
 925        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
 926            raise BoostedAPIException(
 927                f"Incorrect dataset type: {query_info['type']}"
 928                f" - Expected {DataSetType.STRATEGY}"
 929            )
 930        warnings, errors = self.setup_chunk_and_upload_data(
 931            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
 932        )
 933        if len(warnings) > 0:
 934            logger.warning(
 935                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
 936            )
 937        if len(errors) > 0:
 938            raise BoostedAPIException(
 939                "Encountered {0} total ERRORS while uploading dataset".format(len(errors))
 940                + "\n".join(errors)
 941            )
 942        return {"warnings": warnings, "errors": errors}
 943
 944    def add_dependent_data(
 945        self,
 946        dataset_id,
 947        csv_data,
 948        timeout=600,
 949        block=True,
 950        data_type=DataAddType.HISTORICAL,
 951        no_exception_on_chunk_error=False,
 952    ):
 953        warnings = []
 954        query_info = self.query_dataset(dataset_id)
 955        if DataSetType[query_info["type"]] != DataSetType.STOCK:
 956            raise BoostedAPIException(
 957                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
 958            )
 959        warnings, errors = self.setup_chunk_and_upload_data(
 960            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
 961        )
 962        if len(warnings) > 0:
 963            logger.warning(
 964                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
 965            )
 966        if len(errors) > 0:
 967            raise BoostedAPIException(
 968                "Encountered {0} total ERRORS while uploading dataset".format(len(errors))
 969                + "\n".join(errors)
 970            )
 971        return {"warnings": warnings, "errors": errors}
 972
 973    def add_global_data(
 974        self,
 975        dataset_id,
 976        csv_data,
 977        timeout=600,
 978        block=True,
 979        data_type=DataAddType.HISTORICAL,
 980        no_exception_on_chunk_error=False,
 981    ):
 982        query_info = self.query_dataset(dataset_id)
 983        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
 984            raise BoostedAPIException(
 985                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
 986            )
 987        warnings, errors = self.setup_chunk_and_upload_data(
 988            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
 989        )
 990        if len(warnings) > 0:
 991            logger.warning(
 992                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
 993            )
 994        if len(errors) > 0:
 995            raise BoostedAPIException(
 996                "Encountered {0} total ERRORS while uploading dataset".format(len(errors))
 997                + "\n".join(errors)
 998            )
 999        return {"warnings": warnings, "errors": errors}
1000
1001    def get_csv_buffer(self):
1002        return io.StringIO()
1003
1004    def start_chunked_upload(self, dataset_id):
1005        url = self.base_uri + "/api/datasets/{0}/start-chunked-upload".format(dataset_id)
1006        headers = {"Authorization": "ApiKey " + self.api_key}
1007        res = requests.post(url, headers=headers, **self._request_params)
1008        if res.ok:
1009            return res.json()["result"]
1010        else:
1011            error_msg = self._try_extract_error_code(res)
1012            logger.error(error_msg)
1013            raise BoostedAPIException(
1014                "Failed to obtain dataset lock for upload: {0}.".format(error_msg)
1015            )
1016
1017    def abort_chunked_upload(self, dataset_id, chunk_id):
1018        url = self.base_uri + "/api/datasets/{0}/abort-chunked-upload".format(dataset_id)
1019        headers = {"Authorization": "ApiKey " + self.api_key}
1020        params = {"uploadGroupId": chunk_id}
1021        res = requests.post(url, headers=headers, **self._request_params, params=params)
1022        if not res.ok:
1023            error_msg = self._try_extract_error_code(res)
1024            logger.error(error_msg)
1025            raise BoostedAPIException(
1026                "Failed to abort dataset lock during error: {0}.".format(error_msg)
1027            )
1028
1029    def check_dataset_ingestion_completion(self, dataset_id, chunk_id, start_time):
1030        url = self.base_uri + "/api/datasets/{0}/upload-chunk-status".format(dataset_id)
1031        headers = {"Authorization": "ApiKey " + self.api_key}
1032        params = {"uploadGroupId": chunk_id}
1033        res = requests.get(url, headers=headers, **self._request_params, params=params)
1034        res = res.json()
1035
1036        finished = False
1037        warnings = []
1038        errors = []
1039
1040        if isinstance(res, dict):
1041            dataset_status = res["datasetStatus"]
1042            chunk_status = res["chunkStatus"]
1043            if chunk_status != ChunkStatus.PROCESSING.value:
1044                finished = True
1045                errors = res["errors"]
1046                warnings = res["warnings"]
1047                successful_rows = res["successfulRows"]
1048                total_rows = res["totalRows"]
1049                logger.info(
1050                    f"Successfully ingested {successful_rows} out of {total_rows} uploaded rows."
1051                )
1052                if chunk_status in [
1053                    ChunkStatus.SUCCESS.value,
1054                    ChunkStatus.WARNING.value,
1055                    ChunkStatus.ERROR.value,
1056                ]:
1057                    if dataset_status != "AVAILABLE":
1058                        raise BoostedAPIException(
1059                            "Dataset was unexpectedly unavailable after chunk upload finished."
1060                        )
1061                    else:
1062                        logger.info("Ingestion complete.  Uploaded data is ready for use.")
1063                elif chunk_status == ChunkStatus.ABORTED.value:
1064                    errors.append(
1065                        "Dataset chunk upload was aborted by server! Upload did not succeed."
1066                    )
1067                else:
1068                    errors.append("Unexpected data ingestion status: {0}.".format(chunk_status))
                else:
1069                logger.info(
1070                    "Data ingestion still running.  Time elapsed={0}.".format(
1071                        datetime.datetime.now() - start_time
1072                    )
1073                )
1074        else:
1075            raise BoostedAPIException("Unable to get status of dataset ingestion.")
1076        return {"finished": finished, "warnings": warnings, "errors": errors}
1077
1078    def _commit_chunked_upload(self, dataset_id, chunk_id, data_type, block=True, timeout=600):
1079        url = self.base_uri + "/api/datasets/{0}/commit-chunked-upload".format(dataset_id)
1080        headers = {"Authorization": "ApiKey " + self.api_key}
1081        params = {
1082            "uploadGroupId": chunk_id,
1083            "dataAddType": data_type,
1084            "sendCompletionEmail": not block,
1085        }
1086        res = requests.post(url, headers=headers, **self._request_params, params=params)
1087        if not res.ok:
1088            error_msg = self._try_extract_error_code(res)
1089            logger.error(error_msg)
1090            raise BoostedAPIException("Failed to commit dataset files: {0}.".format(error_msg))
1091
1092        if block:
1093            start_time = datetime.datetime.now()
1094            # Keep waiting until the chunk is no longer in PROCESSING state...
1095            while True:
1096                result = self.check_dataset_ingestion_completion(dataset_id, chunk_id, start_time)
1097                if result["finished"]:
1098                    break
1099
1100                if (datetime.datetime.now() - start_time).total_seconds() > timeout:
1101                    err_str = (
1102                        f"Timeout waiting for commit of dataset: {dataset_id} | chunk: {chunk_id}"
1103                    )
1104                    logger.error(err_str)
1105                    return [], [err_str]
1106
1107                time.sleep(10)
1108            return result["warnings"], result["errors"]
1109        else:
1110            return [], []
1111
1112    def setup_chunk_and_upload_data(
1113        self,
1114        dataset_id,
1115        csv_data,
1116        data_type,
1117        timeout=600,
1118        block=True,
1119        no_exception_on_chunk_error=False,
1120    ):
1121        chunk_id = self.start_chunked_upload(dataset_id)
1122        logger.info("Obtained lock on dataset for upload: " + chunk_id)
1123        try:
1124            warnings, errors = self.chunk_and_upload_data(
1125                dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
1126            )
1127            commit_warnings, commit_errors = self._commit_chunked_upload(
1128                dataset_id, chunk_id, data_type, block, timeout
1129            )
1130            return warnings + commit_warnings, errors + commit_errors
1131        except Exception:
1132            self.abort_chunked_upload(dataset_id, chunk_id)
1133            raise
1134
1135    def chunk_and_upload_data(
1136        self, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False
1137    ):
1138        if isinstance(csv_data, pd.core.frame.DataFrame):
1139            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
1140                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")
1141
1142            warnings = []
1143            errors = []
1144            logger.info("Uploading yearly.")
1145            for t in csv_data.index.to_period("Y").unique():
1146                if t is pd.NaT:
1147                    continue
1148
1149                # serialize this year's slice to a CSV string
1150                buf = self.get_csv_buffer()
1151                yearly_csv = csv_data.loc[str(t)]
1152                yearly_csv.to_csv(buf, header=True)
1153                raw_csv = buf.getvalue()
1154
1155                # we are already chunking yearly... but if the csv still exceeds a healthy
1156                # limit of 50 MB, the final line of defence is to ignore date boundaries and
1157                # just chunk the rows. This is mostly for the Cloudflare upload limit.
1158                size_lim = 50 * 1000 * 1000
1159                est_csv_size = sys.getsizeof(raw_csv)
1160                if est_csv_size > size_lim:
1161                    del raw_csv, buf
1162                    logger.info("Yearly data too large for single upload, chunking further...")
1163                    chunks = []
1164                    nchunks = math.ceil(est_csv_size / size_lim)
1165                    rows_per_chunk = math.ceil(len(yearly_csv) / nchunks)
1166                    for i in range(0, len(yearly_csv), rows_per_chunk):
1167                        buf = self.get_csv_buffer()
1168                        split_df = yearly_csv.iloc[i : i + rows_per_chunk]
1169                        split_df.to_csv(buf, header=True)
1170                        split_csv = buf.getvalue()
1171                        chunks.append(
1172                            (
1173                                "{0}-{1}".format(i + 1, min(len(yearly_csv), i + rows_per_chunk)),
1174                                split_csv,
1175                            )
1176                        )
1177                else:
1178                    chunks = [("all", raw_csv)]
1179
1180                for i, (rows_descriptor, chunk_csv) in enumerate(chunks):
1181                    chunk_descriptor = "{0} in yearly chunk {1}".format(rows_descriptor, t)
1182                    logger.info(
1183                        "Uploading rows:"
1184                        + chunk_descriptor
1185                        + " (chunk {0} of {1}):".format(i + 1, len(chunks))
1186                    )
1187                    _, new_warnings, new_errors = self.upload_dataset_chunk(
1188                        chunk_descriptor,
1189                        dataset_id,
1190                        chunk_id,
1191                        chunk_csv,
1192                        timeout,
1193                        no_exception_on_chunk_error,
1194                    )
1195                    warnings.extend(new_warnings)
1196                    errors.extend(new_errors)
1197            return warnings, errors
1198
1199        elif isinstance(csv_data, str):
1200            _, warnings, errors = self.upload_dataset_chunk(
1201                "all data", dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
1202            )
1203            return warnings, errors
1204        else:
1205            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
1206
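        # Worked example of the chunking arithmetic above (illustrative): a yearly
        # CSV of ~120 MB with 1,200,000 rows yields nchunks = ceil(120e6 / 50e6) = 3
        # and rows_per_chunk = ceil(1_200_000 / 3) = 400_000, so the row slices
        # 1-400000, 400001-800000 and 800001-1200000 are uploaded as separate chunks.
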
1207    def upload_dataset_chunk(
1208        self,
1209        chunk_descriptor,
1210        dataset_id,
1211        chunk_id,
1212        csv_data,
1213        timeout=600,
1214        no_exception_on_chunk_error=False,
1215    ):
1216        logger.info("Starting upload: " + chunk_descriptor)
1217        url = self.base_uri + "/api/datasets/{0}/upload-dataset-chunk".format(dataset_id)
1218        headers = {"Authorization": "ApiKey " + self.api_key}
1219        files_req = {}
1220        warnings = []
1221        errors = []
1222
1223        # make the network request
1224        target = ("uploaded_data.csv", csv_data, "text/csv")
1225        files_req["dataFile"] = target
1226        params = {"uploadGroupId": chunk_id}
1227        res = requests.post(
1228            url,
1229            params=params,
1230            files=files_req,
1231            headers=headers,
1232            timeout=timeout,
1233            **self._request_params,
1234        )
1235
1236        if res.ok:
1237            logger.info(
1238                (
1239                    "Chunk upload completed.  "
1240                    "Ingestion started.  "
1241                    "Please wait until the data is in AVAILABLE state."
1242                )
1243            )
1244            if "warnings" in res.json():
1245                warnings = res.json()["warnings"]
1246                if len(warnings) > 0:
1247                    logger.warning("Uploaded chunk encountered data warnings: ")
1248                for w in warnings:
1249                    logger.warning(w)
1250        else:
1251            reason = "Upload failed: {0}, {1}".format(res.text, res.reason)
1252            logger.error(reason)
1253            if no_exception_on_chunk_error:
1254                errors.append(
1255                    "Chunk {0} failed: {1}. ".format(chunk_descriptor, reason)
1256                    + "Your data was only PARTIALLY uploaded. "
1257                    + "Please reattempt the upload of this chunk."
1258                )
1259            else:
1260                raise BoostedAPIException(reason)
1261
1262        return res, warnings, errors
1263
1264    def getAllocationsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1265        date = self.__iso_format(date)
1266        endpoint = "latest-allocations" if rollback_to_last_available_date else "allocations"
1267        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1268        headers = {"Authorization": "ApiKey " + self.api_key}
1269        params = {"date": date}
1270        logger.info("Retrieving allocations information for date {0}.".format(date))
1271        res = requests.get(url, params=params, headers=headers, **self._request_params)
1272        if res.ok:
1273            logger.info("Allocations retrieval successful.")
1274            return res.json()
1275        else:
1276            error_msg = self._try_extract_error_code(res)
1277            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
1278
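        # Usage sketch (illustrative; the portfolio ID is a placeholder):
        #
        #     allocations = client.getAllocationsForDate(
        #         "my-portfolio-id", "2023-01-02", rollback_to_last_available_date=True
        #     )
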
1279    # New API method for fetching data from portfolio_holdings.pb2 file.
1280    def getAllocationsForDateV2(self, portfolio_id, date, rollback_to_last_available_date):
1281        date = self.__iso_format(date)
1282        endpoint = "latest-allocations-v2" if rollback_to_last_available_date else "allocations-v2"
1283        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1284        headers = {"Authorization": "ApiKey " + self.api_key}
1285        params = {"date": date}
1286        logger.info("Retrieving allocations information for date {0}.".format(date))
1287        res = requests.get(url, params=params, headers=headers, **self._request_params)
1288        if res.ok:
1289            logger.info("Allocations retrieval successful.")
1290            return res.json()
1291        else:
1292            error_msg = self._try_extract_error_code(res)
1293            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
1294
1295    def getAllocationsByDates(self, portfolio_id, dates=None):
1296        url = self.base_uri + "/api/portfolios/{0}/allocationsByDate".format(portfolio_id)
1297        headers = {"Authorization": "ApiKey " + self.api_key}
1298        if dates is not None:
1299            fmt_dates = []
1300            for d in dates:
1301                fmt_dates.append(self.__iso_format(d))
1302            fmt_dates_str = ",".join(fmt_dates)
1303            params: Dict = {"dates": fmt_dates_str}
1304            logger.info("Retrieving allocations information for dates {0}.".format(fmt_dates))
1305        else:
1306            params = {"dates": None}
1307            logger.info("Retrieving allocations information for all dates")
1308        res = requests.get(url, params=params, headers=headers, **self._request_params)
1309        if res.ok:
1310            logger.info("Allocations retrieval successful.")
1311            return res.json()
1312        else:
1313            error_msg = self._try_extract_error_code(res)
1314            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
1315
1316    def getSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1317        date = self.__iso_format(date)
1318        endpoint = "latest-signals" if rollback_to_last_available_date else "signals"
1319        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1320        headers = {"Authorization": "ApiKey " + self.api_key}
1321        params = {"date": date}
1322        logger.info("Retrieving signals information for date {0}.".format(date))
1323        res = requests.get(url, params=params, headers=headers, **self._request_params)
1324        if res.ok:
1325            logger.info("Signals retrieval successful.")
1326            return res.json()
1327        else:
1328            error_msg = self._try_extract_error_code(res)
1329            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1330
1331    def getSignalsForAllDates(self, portfolio_id, dates=None):
1332        url = self.base_uri + "/api/portfolios/{0}/signalsByDate".format(portfolio_id)
1333        headers = {"Authorization": "ApiKey " + self.api_key}
1334        params = {}
1335        if dates is not None:
1336            fmt_dates = []
1337            for d in dates:
1338                fmt_dates.append(self.__iso_format(d))
1339            fmt_dates_str = ",".join(fmt_dates)
1340            params = {"dates": fmt_dates_str}
1341            logger.info("Retrieving signals information for dates {0}.".format(fmt_dates))
1342        else:
1343            params = {"dates": None}
1344            logger.info("Retrieving signals information for all dates")
1345        res = requests.get(url, params=params, headers=headers, **self._request_params)
1346        if res.ok:
1347            logger.info("Signals retrieval successful.")
1348            return res.json()
1349        else:
1350            error_msg = self._try_extract_error_code(res)
1351            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1352
1353    def getEquityAccuracy(
1354        self,
1355        model_id: str,
1356        portfolio_id: str,
1357        tickers: List[str],
1358        start_date: Optional[BoostedDate] = None,
1359        end_date: Optional[BoostedDate] = None,
1360    ) -> Dict[str, Dict[str, Any]]:
1361        data: Dict[str, Any] = {}
1362        if start_date is not None:
1363            start_date = convert_date(start_date)
1364            data["startDate"] = start_date.isoformat()
1365        if end_date is not None:
1366            end_date = convert_date(end_date)
1367            data["endDate"] = end_date.isoformat()
1368
1369        if start_date and end_date:
1370            validate_start_and_end_dates(start_date, end_date)
1371
1372        tickers_stream = ",".join(tickers)
1373        data["tickers"] = tickers_stream
1374        data["timestamp"] = time.strftime("%H:%M:%S")
1375        data["shouldRecalc"] = True
1376        url = self.base_uri + f"/api/analysis/equity-accuracy/{model_id}/{portfolio_id}"
1377        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1378
1379        logger.info(
1380            f"Retrieving equity accuracy data for date range {start_date} to {end_date} "
1381            f"for tickers: {tickers}."
1382        )
1383
1384        # Metrics to convert into DataFrames from the JSON output below.
1385        metrics = [
1386            "hit_rate_mean",
1387            "hit_rate_median",
1388            "excess_return_mean",
1389            "excess_return_median",
1390            "return",
1391            "excess_return",
1392        ]
1393
1394        # send the request, retrying on failure
1395        MAX_RETRIES = 10  # maximum number of attempts before giving up
1396        SLEEP_TIME = 3  # seconds to wait between attempts
1397
1398        num_retries = 0
1399        success = False
1400        while not success and num_retries < MAX_RETRIES:
1401            res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
1402            if res.ok:
1403                logger.info("Equity Accuracy Data retrieval successful.")
1404                info = res.json()
1405                success = True
1406            else:
1407                data["shouldRecalc"] = False
1408                num_retries += 1
1409                time.sleep(SLEEP_TIME)
1410
1411        if not success:
1412            raise BoostedAPIException("Failed to retrieve equity accuracy: Request timeout.")
1413
1414        for ticker, accuracy_data in info.items():
1415            for metric in metrics:
1416                metric_matrix = accuracy_data[metric]
1417                if not isinstance(metric_matrix, str):
1418                    # Set the index to the quintile label, and remove it from the data
1419                    index = []
1420                    for row in metric_matrix[1:]:
1421                        index.append(row.pop(0))
1422
1423                    # columns are "1D", "5D", etc.
1424                    df = pd.DataFrame(metric_matrix[1:], columns=metric_matrix[0][1:], index=index)
1425                    accuracy_data[metric] = df
1426        return info
1427
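        # Usage sketch (illustrative; IDs and tickers are placeholders).  Each metric
        # in the result is a DataFrame indexed by quintile with horizon columns such
        # as "1D" and "5D":
        #
        #     info = client.getEquityAccuracy(
        #         "my-model-id",
        #         "my-portfolio-id",
        #         ["AAPL", "MSFT"],
        #         start_date="2022-01-03",
        #         end_date="2022-12-30",
        #     )
        #     print(info["AAPL"]["hit_rate_mean"])
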
1428    def getHistoricalTradeDates(self, portfolio_id, start_date=None, end_date=None):
1429        end_date = self.__to_date_obj(end_date or datetime.date.today())
1430        start_date = self.__iso_format(start_date or (end_date - timedelta(days=365)))
1431        end_date = self.__iso_format(end_date)
1432
1433        url = self.base_uri + "/api/portfolios/{0}/tradingDates".format(portfolio_id)
1434        headers = {"Authorization": "ApiKey " + self.api_key}
1435        params = {"startDate": start_date, "endDate": end_date}
1436
1437        logger.info(
1438            "Retrieving historical trade dates data for date range {0} to {1}.".format(
1439                start_date, end_date
1440            )
1441        )
1442        res = requests.get(url, params=params, headers=headers, **self._request_params)
1443        if res.ok:
1444            logger.info("Trading dates retrieval successful.")
1445            return res.json()["dates"]
1446        else:
1447            error_msg = self._try_extract_error_code(res)
1448            raise BoostedAPIException("Failed to retrieve trading dates: {0}.".format(error_msg))
1449
1450    def getRankingsForAllDates(self, portfolio_id, dates=None):
1451        url = self.base_uri + "/api/portfolios/{0}/rankingsByDate".format(portfolio_id)
1452        headers = {"Authorization": "ApiKey " + self.api_key}
1453        params = {}
1454        if dates is not None:
1455            fmt_dates = []
1456            for d in dates:
1457                fmt_dates.append(self.__iso_format(d))
1458            fmt_dates_str = ",".join(fmt_dates)
1459            params = {"dates": fmt_dates_str}
1460            logger.info("Retrieving rankings information for date {0}.".format(fmt_dates_str))
1461        else:
1462            params = {"dates": None}
1463            logger.info("Retrieving rankings information for all dates")
1464        res = requests.get(url, params=params, headers=headers, **self._request_params)
1465        if res.ok:
1466            logger.info("Rankings retrieval successful.")
1467            return res.json()
1468        else:
1469            error_msg = self._try_extract_error_code(res)
1470            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1471
1472    def getRankingsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1473        date = self.__iso_format(date)
1474        endpoint = "latest-rankings" if rollback_to_last_available_date else "rankings"
1475        url = self.base_uri + "/api/{0}/{1}/{2}".format(endpoint, portfolio_id, date)
1476        headers = {"Authorization": "ApiKey " + self.api_key}
1477        logger.info("Retrieving rankings information for date {0}.".format(date))
1478        res = requests.get(url, headers=headers, **self._request_params)
1479        if res.ok:
1480            logger.info("Rankings retrieval successful.")
1481            return res.json()
1482        else:
1483            error_msg = self._try_extract_error_code(res)
1484            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1485
1486    def sendModelRecalc(self, model_id):
1487        url = self.base_uri + "/api/models/{0}/recalc".format(model_id)
1488        logger.info("Sending model recalc request for model {0}".format(model_id))
1489        headers = {"Authorization": "ApiKey " + self.api_key}
1490        res = requests.put(url, headers=headers, **self._request_params)
1491        if not res.ok:
1492            error_msg = self._try_extract_error_code(res)
1493            logger.error(error_msg)
1494            raise BoostedAPIException(
1495                "Failed to send model recalc request - "
1496                + "the model in UI may be out of date: {0}.".format(error_msg)
1497            )
1498
1499    def sendRecalcAllModelPortfolios(self, model_id: str):
1500        """Recalculates all portfolios under a given model ID.
1501
1502        Args:
1503            model_id: the model ID
1504        Raises:
1505            BoostedAPIException: if the Boosted API request fails
1506        """
1507        url = self.base_uri + f"/api/models/{model_id}/recalc-all-portfolios"
1508        logger.info(f"Sending portfolio recalc requests for all portfolios under {model_id=}.")
1509        headers = {"Authorization": "ApiKey " + self.api_key}
1510        res = requests.put(url, headers=headers, **self._request_params)
1511        if not res.ok:
1512            error_msg = self._try_extract_error_code(res)
1513            logger.error(error_msg)
1514            raise BoostedAPIException(
1515                f"Failed to send recalc request for all portfolios under {model_id=} - {error_msg}."
1516            )
1517
1518    def sendPortfolioRecalc(self, portfolio_id: str):
1519        """Recalculates a single portfolio by its portfolio ID.
1520
1521        Args:
1522            portfolio_id: the portfolio ID to recalculate
1523        Raises:
1524            BoostedAPIException: if the Boosted API request fails
1525        """
1526        url = self.base_uri + "/api/graphql"
1527        logger.info(f"Sending portfolio recalc request for {portfolio_id=}.")
1528        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1529        qry = """
1530            mutation recalcPortfolio($input: RecalculatePortfolioInput!) {
1531                recalculatePortfolio(input: $input) {
1532                    success
1533                    errors
1534                }
1535            }
1536            """
1537        req_json = {
1538            "query": qry,
1539            "variables": {"input": {"portfolioId": f"{portfolio_id}", "allowForceRecalc": "true"}},
1540        }
1541        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
1542        if not res.ok or res.json().get("errors"):
1543            error_msg = self._try_extract_error_code(res)
1544            logger.error(error_msg)
1545            raise BoostedAPIException(
1546                f"Failed to send portfolio recalc request for {portfolio_id=} - {error_msg}."
1547            )
1548
1549    def add_uploaded_model_data(self, url, csv_data, request_data, timeout=600):
1550        logger.info("Starting upload.")
1551        headers = {"Authorization": "ApiKey " + self.api_key}
1552        files_req: Dict = {}
1553        target: Tuple[str, Any, str] = ("data.csv", None, "text/csv")
1554        warnings = []
1555        if isinstance(csv_data, pd.core.frame.DataFrame):
1556            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
1557                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")
1558            buf = io.StringIO()
1559            csv_data.to_csv(buf, header=False)
1560            target = ("uploaded_data.csv", buf.getvalue(), "text/csv")
1561            files_req["dataFile"] = target
1562            res = requests.post(
1563                url,
1564                files=files_req,
1565                data=request_data,
1566                headers=headers,
1567                timeout=timeout,
1568                **self._request_params,
1569            )
1570        elif isinstance(csv_data, str):
1571            target = ("uploaded_data.csv", csv_data, "text/csv")
1572            files_req["dataFile"] = target
1573            res = requests.post(
1574                url,
1575                files=files_req,
1576                data=request_data,
1577                headers=headers,
1578                timeout=timeout,
1579                **self._request_params,
1580            )
1581        else:
1582            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
1583        if res.ok:
1584            logger.info("Signals upload completed.")
1585            result = res.json()["result"]
1586            if "warningMessages" in result:
1587                warnings = result["warningMessages"]
1588        else:
1589            error_str = "Signals upload failed: {0}, {1}".format(res.text, res.reason)
1590            logger.error(error_str)
1591            raise BoostedAPIException(error_str)
1592
1593        return res, warnings
1594
1595    def createSignalsModel(self, csv_data, model_name, timeout=600):
1596        warnings = []
1597        url = self.base_uri + "/api/models/upload/signals/create"
1598        request_data = {"modelName": model_name, "uploadName": model_name}
1599        res, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1600        result = res.json()["result"]
1601        model_id = result["modelId"]
1602        self.sendModelRecalc(model_id)
1603        return model_id, warnings
1604
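        # Usage sketch (illustrative).  The CSV columns are assumed here to mirror
        # those returned by getSignalsFromUploadedModel (date, isin, country,
        # currency, weight), without a header row:
        #
        #     csv_data = "2023-01-02,US0378331005,USA,USD,0.5\n"
        #     model_id, warnings = client.createSignalsModel(csv_data, "My Signals Model")
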
1605    def addToUploadedModel(self, model_id, csv_data, timeout=600, recalc_model=True):
1606        warnings = []
1607        url = self.base_uri + "/api/models/{0}/upload/add-data".format(model_id)
1608        request_data: Dict = {}
1609        _, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1610        if recalc_model:
1611            self.sendModelRecalc(model_id)
1612        return warnings
1613
1614    def addSignalsToUploadedModel(
1615        self,
1616        model_id: str,
1617        csv_data: Union[pd.core.frame.DataFrame, str],
1618        timeout: int = 600,
1619        recalc_all: bool = False,
1620        recalc_portfolio_ids: Optional[List[str]] = None,
1621    ) -> List[str]:
1622        """
1623        Add signals to an uploaded model, then recalculate the model or the
            requested portfolios under it.
1624
1625        Args:
1626            model_id: model ID
1627            csv_data: pandas DataFrame, or a string with signals to upload.
1628            timeout (optional): Timeout for initial upload request in seconds.
1629            recalc_all (optional): if True, recalculates all portfolios in the model.
1630            recalc_portfolio_ids (optional): List of portfolio IDs under the model to re-calculate.
1631        """
1632        warnings = self.addToUploadedModel(model_id, csv_data, timeout, recalc_model=False)
1633
1634        if recalc_all:
1635            self.sendRecalcAllModelPortfolios(model_id)
1636        elif recalc_portfolio_ids:
1637            for portfolio_id in recalc_portfolio_ids:
1638                self.sendPortfolioRecalc(portfolio_id)
1639        else:
1640            self.sendModelRecalc(model_id)
1641        return warnings
1642
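        # Usage sketch (illustrative; IDs are placeholders).  Omitting both recalc
        # arguments falls back to a plain model recalc:
        #
        #     warnings = client.addSignalsToUploadedModel(
        #         "my-model-id",
        #         csv_data,
        #         recalc_portfolio_ids=["portfolio-id-1", "portfolio-id-2"],
        #     )
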
1643    def getSignalsFromUploadedModel(self, model_id, date=None):
1644        date = self.__iso_format(date)
1645        url = self.base_uri + "/api/models/{0}/upload/signals".format(model_id)
1646        headers = {"Authorization": "ApiKey " + self.api_key}
1647        params = {"date": date}
1648        logger.info("Retrieving uploaded signals information")
1649        res = requests.get(url, params=params, headers=headers, **self._request_params)
1650        if res.ok:
1651            result = pd.DataFrame.from_dict(res.json()["result"])
1652            # ensure column order
1653            result = result[["date", "isin", "country", "currency", "weight"]]
1654            result["date"] = pd.to_datetime(result["date"], format="%Y-%m-%d")
1655            result = result.set_index("date")
1656            logger.info("Signals retrieval successful.")
1657            return result
1658        else:
1659            error_msg = self._try_extract_error_code(res)
1660            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1661
1662    def getPortfolioSettings(self, portfolio_id, timeout=600):
1663        url = self.base_uri + "/api/portfolio-settings/{0}".format(portfolio_id)
1664        headers = {"Authorization": "ApiKey " + self.api_key}
1665        res = requests.get(url, headers=headers, **self._request_params)
1666        if res.ok:
1667            return PortfolioSettings(res.json())
1668        else:
1669            error_msg = self._try_extract_error_code(res)
1670            logger.error(error_msg)
1671            raise BoostedAPIException(
1672                "Failed to retrieve portfolio settings: {0}.".format(error_msg)
1673            )
1674
1675    def createPortfolioWithPortfolioSettings(
1676        self, model_id, portfolio_name, portfolio_description, portfolio_settings, timeout=600
1677    ):
1678        url = self.base_uri + "/api/models/{0}/constraints/add".format(model_id)
1679        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1680        setting_string = json.dumps(portfolio_settings.settings)
1681        logger.info("Creating new portfolio with specified setting: {}".format(setting_string))
1682        params = {
1683            "name": portfolio_name,
1684            "description": portfolio_description,
1685            "settings": setting_string,
1686            "validate": "true",
1687        }
1688        res = requests.put(url, json=params, headers=headers, **self._request_params)
1689        if res.ok:
1690            return res.json()
1692        else:
1693            error_msg = self._try_extract_error_code(res)
1694            logger.error(error_msg)
1695            raise BoostedAPIException(
1696                "Failed to create portfolio with the specified settings: {0}.".format(error_msg)
1697            )
1698
1699    def getGbiIdFromIdentCountryCurrencyDate(
1700        self, ident_country_currency_dates: List[DateIdentCountryCurrency], timeout: int = 600
1701    ) -> List[Optional[GbiIdSecurity]]:
1702        url = self.base_uri + "/api/custom-stock-data/map-identifiers-simple"
1703        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1704        identifiers = [
1705            {
1706                "row": idx,
1707                "date": identifier.date,
1708                "isin": identifier.identifier if identifier.id_type == ColumnSubRole.ISIN else None,
1709                "symbol": (
1710                    identifier.identifier if identifier.id_type == ColumnSubRole.SYMBOL else None
1711                ),
1712                "countryPreference": identifier.country,
1713                "currencyPreference": identifier.currency,
1714            }
1715            for idx, identifier in enumerate(ident_country_currency_dates)
1716        ]
1717        params = json.dumps({"identifiers": identifiers})
1718        logger.info(
1719            "Retrieving GBI-ID mapping for {} identifier tuples...".format(
1720                len(ident_country_currency_dates)
1721            )
1722        )
1723        res = requests.post(url, data=params, headers=headers, **self._request_params)
1724
1725        if res.ok:
1726            result = res.json()
1727            warnings = result["warnings"]
1728            if warnings:
1729                for warning in warnings:
1730                    logger.warning(f"Mapping warning: {warning}")
1731            gbiSecurities = []
1732            for idx, ident in enumerate(result["mappedIdentifiers"]):
1733                if ident is None:
1734                    security = None
1735                else:
1736                    security = GbiIdSecurity(
1737                        ident["gbiId"],
1738                        ident_country_currency_dates[idx],
1739                        ident["symbol"],
1740                        ident["companyName"],
1741                    )
1742                gbiSecurities.append(security)
1743
1744            return gbiSecurities
1745        else:
1746            error_msg = self._try_extract_error_code(res)
1747            raise BoostedAPIException(
1748                "Failed to retrieve identifier mappings: {0}.".format(error_msg)
1749            )
1750
1751    # exists for backwards compatibility purposes.
1752    def getGbiIdFromIsinCountryCurrencyDate(self, isin_country_currency_dates, timeout=600):
1753        return self.getGbiIdFromIdentCountryCurrencyDate(
1754            ident_country_currency_dates=isin_country_currency_dates, timeout=timeout
1755        )
1756
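        # Usage sketch for the identifier mapping (illustrative; values are
        # placeholders).  Rows that cannot be mapped come back as None:
        #
        #     queries = [
        #         DateIdentCountryCurrency(
        #             date="2023-01-02",
        #             identifier="US0378331005",
        #             country="USA",
        #             currency="USD",
        #             id_type=ColumnSubRole.ISIN,
        #         )
        #     ]
        #     securities = client.getGbiIdFromIdentCountryCurrencyDate(queries)
        #     print(securities[0].gbi_id if securities[0] else "unmapped")
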
1757    # model_id: str
1758    # returns: Dict[str, str] representing the translation from the rankings ID (feature refs)
1759    # to human readable names
1760    def __get_rankings_ref_translation(self, model_id: str) -> Dict[str, str]:
1761        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1762        feature_name_url = f"/api/models/{model_id}/advanced-explain/translate-feature-ref/"
1763        feature_name_res = requests.post(
1764            self.base_uri + feature_name_url,
1765            data=json.dumps({}),
1766            headers=headers,
1767            **self._request_params,
1768        )
1769
1770        if feature_name_res.ok:
1771            feature_name_dict = feature_name_res.json()
1772            return {
1773                id: "-".join(
1774                    [names["variable_name"], names["transform_name"], names["normalization_name"]]
1775                )
1776                for id, names in feature_name_dict.items()
1777            }
1778        else:
1779            raise BoostedAPIException(
1780                "Failed to get feature names for model; "
1781                "this model doesn't fully support rankings 2.0"
1782            )
1783
1784    def getDatasetDates(self, dataset_id):
1785        url = self.base_uri + f"/api/datasets/{dataset_id}"
1786        headers = {"Authorization": "ApiKey " + self.api_key}
1787        res = requests.get(url, headers=headers, **self._request_params)
1788        if res.ok:
1789            dataset = res.json()
1790            valid_to_array = dataset.get("validTo")
1791            valid_to_date = None
1792            valid_from_array = dataset.get("validFrom")
1793            valid_from_date = None
1794            if valid_to_array:
1795                valid_to_date = datetime.date(
1796                    valid_to_array[0], valid_to_array[1], valid_to_array[2]
1797                )
1798            if valid_from_array:
1799                valid_from_date = datetime.date(
1800                    valid_from_array[0], valid_from_array[1], valid_from_array[2]
1801                )
1802            return {"validTo": valid_to_date, "validFrom": valid_from_date}
1803        else:
1804            error_msg = self._try_extract_error_code(res)
1805            logger.error(error_msg)
1806            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
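
        # Usage sketch (illustrative; placeholder dataset ID, assumes an
        # authenticated `client`): the returned dict holds datetime.date values,
        # or None when the dataset has no bound.
        #
        #     dates = client.getDatasetDates("my-dataset-id")
        #     print(dates["validFrom"], dates["validTo"])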
1807
1808    def getRankingAnalysis(self, model_id, date):
1809        url = (
1810            self.base_uri
1811            + f"/api/explain-trades/analysis/{model_id}/{self.__iso_format(date)}/json"
1812        )
1813        headers = {"Authorization": "ApiKey " + self.api_key}
1814        analysis_res = requests.get(url, headers=headers, **self._request_params)
1815        if analysis_res.ok:
1816            ranking_dict = analysis_res.json()
1817            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1818            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1819
1820            df = protoCubeJsonDataToDataFrame(
1821                ranking_dict["data"],
1822                "Data Buckets",
1823                ranking_dict["rows"],
1824                "Feature Names",
1825                columns,
1826                ranking_dict["fields"],
1827            )
1828            return df
1829        else:
1830            error_msg = self._try_extract_error_code(analysis_res)
1831            logger.error(error_msg)
1832            raise BoostedAPIException("Failed to get ranking analysis: {0}.".format(error_msg))
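
        # Usage sketch (illustrative; placeholder model ID): the result is a
        # DataFrame built from the proto-cube JSON, indexed by "Data Buckets" and
        # "Feature Names".
        #
        #     df = client.getRankingAnalysis("my-model-id", "2023-01-02")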
1833
1834    def getExplainForPortfolio(
1835        self,
1836        model_id,
1837        portfolio_id,
1838        date,
1839        index_by_symbol: bool = False,
1840        index_by_all_metadata: bool = False,
1841    ):
1842        """
1843        Gets the ranking 2.0 explain data for the given model on the given date
1844        filtered by portfolio.
1845
1846        Parameters
1847        ----------
1848        model_id: str
1849            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1850            button next to your model's name in the Model Summary Page in Boosted
1851            Insights.
1852        portfolio_id: str
1853            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
1854        date: datetime.date or YYYY-MM-DD string
1855            Date of the data to retrieve.
1856        index_by_symbol: bool
1857            If true, index by stock symbol instead of ISIN.
1858        index_by_all_metadata: bool
1859            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1860            Overrides index_by_symbol.
1861
1862        Returns
1863        -------
1864        pandas.DataFrame
1865            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1866            and feature names, filtered by portfolio.
1867        ___
1868        """
1869        indices = ["Symbol", "ISINs", "Country", "Currency"]
1870        raw_explain_df = self.getRankingExplain(
1871            model_id, date, index_by_symbol=False, index_by_all_metadata=True
1872        )
1873        pa_ratings_dict = self.getRankingsForDate(portfolio_id, date, False)
1874
1875        ratings = pa_ratings_dict["rankings"]
1876        ratings_df = pd.DataFrame(ratings)
1877        ratings_df = ratings_df[["symbol", "isin", "country", "currency"]]
1878        ratings_df.columns = pd.Index(indices)
1879        ratings_df.set_index(indices, inplace=True)
1880
1881        # inner join to only get the securities in both data frames
1882        result_df = raw_explain_df.merge(ratings_df, left_index=True, right_index=True, how="inner")
1883
1884        # set index based on input parameters
1885        if index_by_symbol and not index_by_all_metadata:
1886            result_df = result_df.reset_index()
1887            result_df = result_df.drop(columns=["ISINs", "Currency", "Country"])
1888            result_df.set_index(["Symbol", "Feature Names"], inplace=True)
1889        elif not index_by_symbol and not index_by_all_metadata:
1890            result_df = result_df.reset_index()
1891            result_df = result_df.drop(columns=["Symbol", "Currency", "Country"])
1892            result_df.set_index(["ISINs", "Feature Names"], inplace=True)
1893
1894        return result_df
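
        # Usage sketch (illustrative; placeholder IDs): restrict explain data to
        # the securities held in one portfolio and index the result by symbol.
        #
        #     df = client.getExplainForPortfolio(
        #         "my-model-id", "my-portfolio-id", "2023-01-02", index_by_symbol=True
        #     )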
1895
1896    def getRankingExplain(
1897        self, model_id, date, index_by_symbol: bool = False, index_by_all_metadata: bool = False
1898    ):
1899        """
1900        Gets the ranking 2.0 explain data for the given model on the given date
1901
1902        Parameters
1903        ----------
1904        model_id: str
1905            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1906            button next to your model's name in the Model Summary Page in Boosted
1907            Insights.
1908        date: datetime.date or YYYY-MM-DD string
1909            Date of the data to retrieve.
1910        index_by_symbol: bool
1911            If true, index by stock symbol instead of ISIN.
1912        index_by_all_metadata: bool
1913            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1914            Overrides index_by_symbol.
1915
1916        Returns
1917        -------
1918        pandas.DataFrame
1919            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1920            and feature names.
1921        ___
1922        """
1923        url = (
1924            self.base_uri + f"/api/explain-trades/explain/{model_id}/{self.__iso_format(date)}/json"
1925        )
1926        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1927        explain_res = requests.get(url, headers=headers, **self._request_params)
1928        if explain_res.ok:
1929            ranking_dict = explain_res.json()
1930            rows = ranking_dict["rows"]
1931            stock_summary_url = f"/api/stock-summaries/{model_id}"
1932            stock_summary_body = {"gbiIds": ranking_dict["rows"]}
1933            summary_res = requests.post(
1934                self.base_uri + stock_summary_url,
1935                data=json.dumps(stock_summary_body),
1936                headers=headers,
1937                **self._request_params,
1938            )
1939            if summary_res.ok:
1940                stock_summary = summary_res.json()
1941                if index_by_symbol:
1942                    rows = [stock_summary[row]["symbol"] for row in ranking_dict["rows"]]
1943                elif index_by_all_metadata:
1944                    rows = [
1945                        [
1946                            stock_summary[row]["isin"],
1947                            stock_summary[row]["symbol"],
1948                            stock_summary[row]["currency"],
1949                            stock_summary[row]["country"],
1950                        ]
1951                        for row in ranking_dict["rows"]
1952                    ]
1953                else:
1954                    rows = [stock_summary[row]["isin"] for row in ranking_dict["rows"]]
1955            else:
1956                error_msg = self._try_extract_error_code(summary_res)
1957                logger.error(error_msg)
1958                raise BoostedAPIException(
1959                    "Failed to get isin information ranking explain: {0}.".format(error_msg)
1960                )
1961
1962            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1963            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1964
1965            id_col_name = "Symbols" if index_by_symbol else "ISINs"
1966
1967            if index_by_all_metadata:
1968                pc_list = []
1969                pf = ranking_dict["data"]
1970                for row_idx, row in enumerate(rows):
1971                    for col_idx, col in enumerate(columns):
1972                        pc_list.append([row, col] + pf[row_idx]["columns"][col_idx]["fields"])
1973                df = pd.DataFrame(pc_list)
1974                df = df.set_axis(
1975                    ["Metadata", "Feature Names"] + ranking_dict["fields"], axis="columns"
1976                )
1977
1978                metadata_df = df["Metadata"].apply(pd.Series)
1979                metadata_df.columns = pd.Index(["ISINs", "Symbol", "Currency", "Country"])
1980                result_df = pd.concat([metadata_df, df], axis=1).drop("Metadata", axis=1)
1981                result_df.set_index(
1982                    ["ISINs", "Symbol", "Currency", "Country", "Feature Names"], inplace=True
1983                )
1984                return result_df
1985
1986            else:
1987                df = protoCubeJsonDataToDataFrame(
1988                    ranking_dict["data"],
1989                    id_col_name,
1990                    rows,
1991                    "Feature Names",
1992                    columns,
1993                    ranking_dict["fields"],
1994                )
1995
1996                return df
1997        else:
1998            error_msg = self._try_extract_error_code(explain_res)
1999            logger.error(error_msg)
2000            raise BoostedAPIException("Failed to get ranking explain: {0}.".format(error_msg))
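
        # Usage sketch (illustrative; placeholder model ID): with
        # index_by_all_metadata the frame is indexed by
        # (ISINs, Symbol, Currency, Country, Feature Names).
        #
        #     df = client.getRankingExplain(
        #         "my-model-id", "2023-01-02", index_by_all_metadata=True
        #     )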
2001
2002    def getDenseSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
2003        date = self.__iso_format(date)
2004        url = self.base_uri + f"/api/portfolios/{portfolio_id}/denseSignalsByDate"
2005        headers = {"Authorization": "ApiKey " + self.api_key}
2006        params = {
2007            "startDate": date,
2008            "endDate": date,
2009            "rollbackToMostRecentDate": rollback_to_last_available_date,
2010        }
2011        logger.info("Retrieving dense signals information for date {0}.".format(date))
2012        res = requests.get(url, params=params, headers=headers, **self._request_params)
2013        if res.ok:
2014            logger.info("Signals retrieval successful.")
2015            d = res.json()
2016            # reshape date to output format
2017            date = list(d["signals"].keys())[0]
2018            model_id = d["model_id"]
2019            signals_list = list(d["signals"].values())[0]
2020            return {"date": date, "signals": [{"model_id": model_id, "signals_info": signals_list}]}
2021        else:
2022            error_msg = self._try_extract_error_code(res)
2023            raise BoostedAPIException("Failed to retrieve dense signals: {0}.".format(error_msg))
2024
2025    def getDenseSignals(self, model_id, portfolio_id, file_name=None, location="./"):
2026        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/dense-signals"
2027        headers = {"Authorization": "ApiKey " + self.api_key}
2028        res = requests.get(url, headers=headers, **self._request_params)
2029        if file_name is None:
2030            file_name = f"{model_id}-{portfolio_id}_dense_signals.csv"
2031        download_location = os.path.join(location, file_name)
2032        if res.ok:
2033            with open(download_location, "wb") as file:
2034                file.write(res.content)
2035            print("Download Complete")
2036        elif res.status_code == 404:
2037            raise BoostedAPIException(
2038                f"""Dense Signals file does not exist for model:
2039                 {model_id} - portfolio: {portfolio_id}"""
2040            )
2041        else:
2042            error_msg = self._try_extract_error_code(res)
2043            logger.error(error_msg)
2044            raise BoostedAPIException(
2045                f"""Failed to download dense signals file for model:
2046                 {model_id} - portfolio: {portfolio_id}"""
2047            )
2048
2049    def _getIsPortfolioReadyForProcessing(self, model_id, portfolio_id, formatted_date):
2050        headers = {"Authorization": "ApiKey " + self.api_key}
2051        url = (
2052            self.base_uri
2053            + f"/api/explain-trades/{model_id}/{portfolio_id}"
2054            + f"/is-ready-for-processing/{formatted_date}"
2055        )
2056        res = requests.get(url, headers=headers, **self._request_params)
2057
2058        try:
2059            if res.ok:
2060                body = res.json()
2061                if "ready" in body:
2062                    if body["ready"]:
2063                        return True, ""
2064                    else:
2065                        reason_from_api = (
2066                            body["notReadyReason"] if "notReadyReason" in body else "Unavailable"
2067                        )
2068
2069                        returned_reason = reason_from_api
2070
2071                        if returned_reason == "SKIP":
2072                            returned_reason = "holiday- market closed"
2073
2074                        if returned_reason == "WAITING":
2075                            returned_reason = "calculations pending"
2076
2077                        return False, returned_reason
2078                else:
2079                    return False, "Unavailable"
2080            else:
2081                error_msg = self._try_extract_error_code(res)
2082                logger.error(error_msg)
2083                raise BoostedAPIException(
2084                    f"""Failed to generate file for model:
2085                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2086                )
2087        except Exception as e:
2088            raise BoostedAPIException(
2089                f"""Failed to generate file for model:
2090                {model_id} - portfolio: {portfolio_id} on date: {formatted_date} {e}"""
2091            )
2092
2093    def getRanking2DateAnalysisFile(
2094        self, model_id, portfolio_id, date, file_name=None, location="./"
2095    ):
2096        formatted_date = self.__iso_format(date)
2097        s3_file_name = f"{formatted_date}_analysis.xlsx"
2098        download_url = (
2099            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2100        )
2101        headers = {"Authorization": "ApiKey " + self.api_key}
2102        if file_name is None:
2103            file_name = f"{model_id}-{portfolio_id}_statistical_analysis_{formatted_date}.xlsx"
2104        download_location = os.path.join(location, file_name)
2105
2106        res = requests.get(download_url, headers=headers, **self._request_params)
2107        if res.ok:
2108            with open(download_location, "wb") as file:
2109                file.write(res.content)
2110            print("Download Complete")
2111        elif res.status_code == 404:
2112            (
2113                is_portfolio_ready_for_processing,
2114                portfolio_ready_status,
2115            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2116
2117            if not is_portfolio_ready_for_processing:
2118                logger.info(
2119                    f"""\nPortfolio {portfolio_id} for model {model_id}
2120                    on date {date} unavailable for Ranking2Date Analysis file.
2121                    Status: {portfolio_ready_status}\n"""
2122                )
2123                return
2124
2125            generate_url = (
2126                self.base_uri
2127                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2128                + f"/generate/date-data/{formatted_date}"
2129            )
2130
2131            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2132            if generate_res.ok:
2133                download_res = requests.get(download_url, headers=headers, **self._request_params)
2134                while download_res.status_code == 404 or (
2135                    download_res.ok and len(download_res.content) == 0
2136                ):
2137                    print("waiting for file to be generated")
2138                    time.sleep(5)
2139                    download_res = requests.get(
2140                        download_url, headers=headers, **self._request_params
2141                    )
2142                if download_res.ok:
2143                    with open(download_location, "wb") as file:
2144                        file.write(download_res.content)
2145                    print("Download Complete")
2146            else:
2147                error_msg = self._try_extract_error_code(generate_res)
2148                logger.error(error_msg)
2149                raise BoostedAPIException(
2150                    f"""Failed to generate ranking analysis file for model:
2151                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2152                )
2153        else:
2154            error_msg = self._try_extract_error_code(res)
2155            logger.error(error_msg)
2156            raise BoostedAPIException(
2157                f"""Failed to download ranking analysis file for model:
2158                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2159            )
2160
2161    def getRanking2DateExplainFile(
2162        self,
2163        model_id,
2164        portfolio_id,
2165        date,
2166        file_name=None,
2167        location="./",
2168        overwrite: bool = False,
2169        index_by_all_metadata: bool = False,
2170    ):
2171        """
2172        Downloads the ranking explain file for the provided portfolio and model.
2173        If no file exists, the method requests generation of the file and then
2174        polls the server every 5 seconds until the file can be downloaded.
2175
2176        Parameters
2177        ----------
2178        model_id: str
2179            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
2180            button next to your model's name in the Model Summary Page in Boosted
2181            Insights.
2182        portfolio_id: str
2183            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
2184        date: datetime.date or YYYY-MM-DD string
2185            Date of the data to retrieve.
2186        file_name: str
2187            File name to save the explain data file as.
2188            If no file name is given the file name will be
2189            "<model_id>-<portfolio_id>_explain_data_<date>.xlsx"
2190        location: str
2191            The location to save the file to.
2192            If no location is given then it will be saved to the current directory.
2193        overwrite: bool
2194            Defaults to False, set to True to regenerate the file.
2195        index_by_all_metadata: bool
2196            If true, index by all metadata: ISIN, stock symbol, currency, and country.
2197
2198
2199        Returns
2200        -------
2201        None
2202        ___
2203        """
2204        formatted_date = self.__iso_format(date)
2205        if index_by_all_metadata:
2206            s3_file_name = f"{formatted_date}_explaindata_withmetadata.xlsx"
2207        else:
2208            s3_file_name = f"{formatted_date}_explaindata.xlsx"
2209        download_url = (
2210            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2211        )
2212        headers = {"Authorization": "ApiKey " + self.api_key}
2213        if file_name is None:
2214            file_name = f"{model_id}-{portfolio_id}_explain_data_{formatted_date}.xlsx"
2215        download_location = os.path.join(location, file_name)
2216
2217        if not overwrite:
2218            res = requests.get(download_url, headers=headers, **self._request_params)
2219        if not overwrite and res.ok:
2220            with open(download_location, "wb") as file:
2221                file.write(res.content)
2222            print("Download Complete")
2223        elif overwrite or res.status_code == 404:
2224            (
2225                is_portfolio_ready_for_processing,
2226                portfolio_ready_status,
2227            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2228
2229            if not is_portfolio_ready_for_processing:
2230                logger.info(
2231                    f"""\nPortfolio {portfolio_id} for model {model_id}
2232                    on date {date} unavailable for Ranking2Date Explain file.
2233                    Status: {portfolio_ready_status}\n"""
2234                )
2235                return
2236
2237            generate_url = (
2238                self.base_uri
2239                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2240                + f"/generate/date-data/{formatted_date}"
2241                + f"/{'true' if index_by_all_metadata else 'false'}"
2242            )
2243
2244            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2245            if generate_res.ok:
2246                download_res = requests.get(download_url, headers=headers, **self._request_params)
2247                while download_res.status_code == 404 or (
2248                    download_res.ok and len(download_res.content) == 0
2249                ):
2250                    print("waiting for file to be generated")
2251                    time.sleep(5)
2252                    download_res = requests.get(
2253                        download_url, headers=headers, **self._request_params
2254                    )
2255                if download_res.ok:
2256                    with open(download_location, "wb") as file:
2257                        file.write(download_res.content)
2258                    print("Download Complete")
2259            else:
2260                error_msg = self._try_extract_error_code(generate_res)
2261                logger.error(error_msg)
2262                raise BoostedAPIException(
2263                    f"""Failed to generate ranking explain file for model:
2264                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2265                )
2266        else:
2267            error_msg = self._try_extract_error_code(res)
2268            logger.error(error_msg)
2269            raise BoostedAPIException(
2270                f"""Failed to download ranking explain file for model:
2271                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2272            )
2273
2274    def getRanking2DateExplain(
2275        self,
2276        model_id: str,
2277        portfolio_id: str,
2278        date: Optional[datetime.date],
2279        overwrite: bool = False,
2280    ) -> Dict[str, pd.DataFrame]:
2281        """
2282        Wrapper around getRanking2DateExplainFile that returns a dict of pandas
2283        DataFrames (one per sheet) instead of downloading to a path. Each frame
2284        is indexed by symbol and should always have 'rating' and 'rating_delta'
2285        columns; the remaining columns are determined by the model's features.
2286        """
2287        file_name = "explaindata.xlsx"
2288        with tempfile.TemporaryDirectory() as tmpdirname:
2289            self.getRanking2DateExplainFile(
2290                model_id=model_id,
2291                portfolio_id=portfolio_id,
2292                date=date,
2293                file_name=file_name,
2294                location=tmpdirname,
2295                overwrite=overwrite,
2296            )
2297            full_path = os.path.join(tmpdirname, file_name)
2298            excel_file = pd.ExcelFile(full_path)
2299            df_map = pd.read_excel(excel_file, sheet_name=None)
2300            df_map_final = {str(sheet): df.set_index("Symbol") for (sheet, df) in df_map.items()}
2301
2302        return df_map_final
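
        # Usage sketch (illustrative; placeholder IDs): one DataFrame per Excel
        # sheet, each indexed by Symbol.
        #
        #     sheets = client.getRanking2DateExplain(
        #         "my-model-id", "my-portfolio-id", datetime.date(2023, 1, 2)
        #     )
        #     for name, df in sheets.items():
        #         print(name, df.columns.tolist())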
2303
2304    def getTearSheet(self, model_id, portfolio_id, start_date=None, end_date=None, block=False):
2305        if start_date is None or end_date is None:
2306            if start_date is not None or end_date is not None:
2307                raise ValueError("start_date and end_date must both be None or both be defined")
2308            return self._getCurrentTearSheet(model_id, portfolio_id)
2309
2310        start_date_obj = self.__to_date_obj(start_date)
2311        end_date_obj = self.__to_date_obj(end_date)
2312        if start_date_obj >= end_date_obj:
2313            raise ValueError("end_date must be later than the start_date")
2314
2315        # get for the given date
2316        url = self.base_uri + f"/api/analysis/keyfacts/{model_id}/{portfolio_id}"
2317        data = {
2318            "startDate": self.__iso_format(start_date),
2319            "endDate": self.__iso_format(end_date),
2320            "shouldRecalc": True,
2321        }
2322        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2323        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2324        if res.status_code == 404 and block:
2325            retries = 0
2326            data["shouldRecalc"] = False
2327            while retries < 10:
2328                time.sleep(10)
2329                retries += 1
2330                res = requests.post(
2331                    url, data=json.dumps(data), headers=headers, **self._request_params
2332                )
2333                if res.status_code != 404:
2334                    break
2335        if res.ok:
2336            return res.json()
2337        else:
2338            error_msg = self._try_extract_error_code(res)
2339            logger.error(error_msg)
2340            raise BoostedAPIException(
2341                "Failed to get tear sheet data: {0} {1}.".format(error_msg, str(res.status_code))
2342            )
2343
2344    def _getCurrentTearSheet(self, model_id, portfolio_id):
2345        url = self.base_uri + f"/api/model-summaries/{model_id}/{portfolio_id}"
2346        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2347        res = requests.get(url, headers=headers, **self._request_params)
2348        if res.ok:
2349            data = res.json()  # avoid shadowing the json module
2350            return data.get("tearSheet", {})
2351        else:
2352            error_msg = self._try_extract_error_code(res)
2353            logger.error(error_msg)
2354            raise BoostedAPIException("Failed to get tear sheet data: {0}.".format(error_msg))
2355
2356    def getPortfolioStatus(self, model_id, portfolio_id, job_date):
2357        url = (
2358            self.base_uri
2359            + f"/api/analysis/portfolioStatus/{model_id}/{portfolio_id}?jobDate={job_date}"
2360        )
2361        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2362        res = requests.get(url, headers=headers, **self._request_params)
2363        if res.ok:
2364            result = res.json()
2365            return {
2366                "is_complete": result["status"],
2367                "last_update": None if result["lastUpdate"] is None else result["lastUpdate"][:10],
2368                "next_update": None if result["nextUpdate"] is None else result["nextUpdate"][:10],
2369            }
2370        else:
2371            error_msg = self._try_extract_error_code(res)
2372            logger.error(error_msg)
2373            raise BoostedAPIException("Failed to get portfolio status: {0}".format(error_msg))
2374
2375    def _query_portfolio_factor_attribution(
2376        self,
2377        portfolio_id: str,
2378        start_date: Optional[BoostedDate] = None,
2379        end_date: Optional[BoostedDate] = None,
2380    ):
2381        response = self._get_graphql(
2382            query=graphql_queries.GET_PORTFOLIO_FACTOR_ATTRIBUTION_QUERY,
2383            variables={
2384                "portfolioId": portfolio_id,
2385                "startDate": str(start_date) if start_date else None,
2386                "endDate": str(end_date) if end_date else None,
2387            },
2388            error_msg_prefix="Failed to get factor attribution: ",
2389        )
2390        return response
2391
2392    def get_portfolio_factor_attribution(
2393        self,
2394        portfolio_id: str,
2395        start_date: Optional[BoostedDate] = None,
2396        end_date: Optional[BoostedDate] = None,
2397    ):
2398        """Get portfolio factor attribution for a portfolio
2399
2400        Args:
2401            portfolio_id (str): a valid UUID string
2402            start_date (BoostedDate, optional): The start date. Defaults to None.
2403            end_date (BoostedDate, optional): The end date. Defaults to None.
2404        """
2405        response = self._query_portfolio_factor_attribution(portfolio_id, start_date, end_date)
2406        factor_attribution = response["data"]["portfolio"]["factorAttribution"]
2407        dates = pd.DatetimeIndex(data=factor_attribution["dates"])
2408        beta = factor_attribution["factorBetas"]
2409        beta_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in beta})
2410        beta_df = beta_df.add_suffix("_beta")
2411        returns = factor_attribution["portfolioFactorPerformance"]
2412        returns_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in returns})
2413        returns_df = returns_df.add_suffix("_return")
2414        returns_df = (returns_df - 1) * 100
2415
2416        final_df = pd.concat([returns_df, beta_df], axis=1)
2417        ordered_columns = list(itertools.chain(*zip(returns_df.columns, beta_df.columns)))
2418        ordered_final_df = final_df.reindex(columns=ordered_columns)
2419
2420        # Add the column `total_return`, the row-wise sum of the factor returns
2421        ordered_final_df["total_return"] = returns_df.sum(axis=1)
2422        return ordered_final_df
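
        # Usage sketch (illustrative; placeholder UUID): columns arrive interleaved
        # as <factor>_return / <factor>_beta pairs, plus a trailing total_return.
        #
        #     df = client.get_portfolio_factor_attribution(
        #         "00000000-0000-0000-0000-000000000000",
        #         start_date="2023-01-01",
        #         end_date="2023-06-30",
        #     )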
2423
2424    def getBlacklist(self, blacklist_id):
2425        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2426        headers = {"Authorization": "ApiKey " + self.api_key}
2427        res = requests.get(url, headers=headers, **self._request_params)
2428        if res.ok:
2429            result = res.json()
2430            return result
2431        error_msg = self._try_extract_error_code(res)
2432        logger.error(error_msg)
2433        raise BoostedAPIException(f"Failed to get blacklist with id {blacklist_id}: {error_msg}")
2434
2435    def getBlacklists(self, model_id=None, company_id=None, last_N=None):
2436        params = {}
2437        if last_N:
2438            params["lastN"] = last_N
2439        if model_id:
2440            params["modelId"] = model_id
2441        if company_id:
2442            params["companyId"] = company_id
2443        url = self.base_uri + "/api/blacklist"
2444        headers = {"Authorization": "ApiKey " + self.api_key}
2445        res = requests.get(url, headers=headers, params=params, **self._request_params)
2446        if res.ok:
2447            result = res.json()
2448            return result
2449        error_msg = self._try_extract_error_code(res)
2450        logger.error(error_msg)
2451        raise BoostedAPIException(
2452            f"Failed to get blacklists with model_id {model_id} "
2453            f"company_id {company_id} last_N {last_N}: {error_msg}"
2454        )
2455
2456    def createBlacklist(
2457        self,
2458        isin,
2459        long_short=2,
2460        start_date=None,
2461        end_date="4000-01-01",
2462        model_id=None,
2463    ):
2464        url = self.base_uri + "/api/blacklist"
            # Default the start date at call time; datetime.date.today() as a
            # default argument would be evaluated only once, at import time.
            if start_date is None:
                start_date = datetime.date.today()
2465        data = {
2466            "modelId": model_id,
2467            "isin": isin,
2468            "longShort": long_short,
2469            "startDate": self.__iso_format(start_date),
2470            "endDate": self.__iso_format(end_date),
2471        }
2472        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2473        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2474        if res.ok:
2475            return res.json()
2476        else:
2477            error_msg = self._try_extract_error_code(res)
2478            logger.error(error_msg)
2479            raise BoostedAPIException(
2480                f"Failed to create the blacklist with isin {isin} "
2481                f"long_short {long_short} start_date {start_date} "
2482                f"end_date {end_date} model_id {model_id}: {error_msg}."
2483            )
2484
2485    def createBlacklistsFromCSV(self, csv_name):
2486        url = self.base_uri + "/api/blacklists"
2487        data = []
2488        with open(csv_name, mode="r") as f:
2489            csv_reader = csv.DictReader(f)
2490            for row in csv_reader:
2491                blacklist = {"modelId": row["ModelID"], "isin": row["ISIN"]}
2492                if not row.get("LongShort"):
2493                    blacklist["longShort"] = 2
2494                else:
2495                    blacklist["longShort"] = row["LongShort"]
2496
2497                if not row.get("StartDate"):
2498                    blacklist["startDate"] = self.__iso_format(datetime.date.today())
2499                else:
2500                    blacklist["startDate"] = self.__iso_format(row["StartDate"])
2501
2502                if not row.get("EndDate"):
2503                    blacklist["endDate"] = self.__iso_format("4000-01-01")
2504                else:
2505                    blacklist["endDate"] = self.__iso_format(row["EndDate"])
2506                data.append(blacklist)
2507        print(f"Processed {len(data)} blacklists.")
2508        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2509        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2510        if res.ok:
2511            return res.json()
2512        else:
2513            error_msg = self._try_extract_error_code(res)
2514            logger.error(error_msg)
2515            raise BoostedAPIException(f"Failed to create blacklists: {error_msg}")
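
        # Expected CSV layout (inferred from the reader above; LongShort, StartDate
        # and EndDate are optional and default to 2, today, and 4000-01-01):
        #
        #     ModelID,ISIN,LongShort,StartDate,EndDate
        #     my-model-id,US0378331005,2,2023-01-02,4000-01-01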
2516
2517    def updateBlacklist(self, blacklist_id, long_short=None, start_date=None, end_date=None):
2518        params = {}
2519        if long_short:
2520            params["longShort"] = long_short
2521        if start_date:
2522            params["startDate"] = start_date
2523        if end_date:
2524            params["endDate"] = end_date
2525        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2526        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2527        res = requests.patch(url, json=params, headers=headers, **self._request_params)
2528        if res.ok:
2529            return res.json()
2530        else:
2531            error_msg = self._try_extract_error_code(res)
2532            logger.error(error_msg)
2533            raise BoostedAPIException(
2534                f"Failed to update blacklist with id {blacklist_id}: {error_msg}"
2535            )
2536
2537    def deleteBlacklist(self, blacklist_id):
2538        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2539        headers = {"Authorization": "ApiKey " + self.api_key}
2540        res = requests.delete(url, headers=headers, **self._request_params)
2541        if res.ok:
2542            result = res.json()
2543            return result
2544        else:
2545            error_msg = self._try_extract_error_code(res)
2546            logger.error(error_msg)
2547            raise BoostedAPIException(
2548                f"Failed to delete blacklist with id {blacklist_id}: {error_msg}"
2549            )
2550
2551    def getFeatureImportance(self, model_id, date, N=None):
2552        url = self.base_uri + f"/api/analysis/explainability/{model_id}"
2553        headers = {"Authorization": "ApiKey " + self.api_key}
2554        logger.info("Retrieving rankings information for date {0}.".format(date))
2555        res = requests.get(url, headers=headers, **self._request_params)
2556        if not res.ok:
2557            error_msg = self._try_extract_error_code(res)
2558            logger.error(error_msg)
2559            raise BoostedAPIException(
2560                f"Failed to fetch feature importance for model/portfolio {model_id}: {error_msg}"
2561            )
2562
2563        json_data = res.json()
2564        if not json_data.get("all"):
2565            raise BoostedAPIException("Unexpected formatting of feature importance response")
2566
2567        feature_data = json_data["all"]
2568        # find the right period (assuming returned json has dates in descending order)
2569        date_obj = self.__to_date_obj(date)
2570        start_date_for_return_data = self.__to_date_obj(feature_data[0]["date"])
2571        features_for_requested_period = None
2572
2573        if date_obj > start_date_for_return_data:
2574            features_for_requested_period = feature_data[0]["variable"]
2575        else:
2576            i = 0
2577            while i < len(feature_data) - 1:
2578                current_date = self.__to_date_obj(feature_data[i]["date"])
2579                next_date = self.__to_date_obj(feature_data[i + 1]["date"])
2580                if next_date <= date_obj <= current_date:
2581                    features_for_requested_period = feature_data[i + 1]["variable"]
2582                    start_date_for_return_data = next_date
2583                    break
2584                i += 1
2585
2586        if features_for_requested_period is None:
2587            raise BoostedAPIException(f"No feature data was found for requested date: {date_obj}")
2588
2589        features_for_requested_period.sort(key=lambda x: x["value"], reverse=True)
2590
2591        if type(N) is int and N > 0:
2592            df = pd.DataFrame.from_dict(features_for_requested_period[0:N])
2593        else:
2594            df = pd.DataFrame.from_dict(features_for_requested_period)
2595        result = df[["feature", "value"]]
2596
2597        return result.rename(columns={"feature": f"feature ({start_date_for_return_data})"})
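
        # Usage sketch (illustrative; placeholder model ID): N caps the output at
        # the top-N features by importance for the period covering the given date.
        #
        #     top10 = client.getFeatureImportance("my-model-id", "2023-01-02", N=10)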
2598
2599    def getAllModelNames(self) -> Dict[str, str]:
2600        url = f"{self.base_uri}/api/graphql"
2601        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2602        req_json = {"query": "query listOfModels {\n models { id name }}", "variables": {}}
2603        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2604        if not res.ok:
2605            error_msg = self._try_extract_error_code(res)
2606            logger.error(error_msg)
2607            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2608        data = res.json()
2609        if data["data"]["models"] is None:
2610            return {}
2611        return {rec["id"]: rec["name"] for rec in data["data"]["models"]}
2612
2613    def getAllModelDetails(self) -> Dict[str, Dict[str, Any]]:
2614        url = f"{self.base_uri}/api/graphql"
2615        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2616        req_json = {
2617            "query": "query listOfModels {\n models { id name lastUpdated portfolios { id name }}}",
2618            "variables": {},
2619        }
2620        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2621        if not res.ok:
2622            error_msg = self._try_extract_error_code(res)
2623            logger.error(error_msg)
2624            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2625        data = res.json()
2626        if data["data"]["models"] is None:
2627            return {}
2628
2629        output_data = {}
2630        for rec in data["data"]["models"]:
2631            model_id = rec["id"]
2632            output_data[model_id] = {
2633                "name": rec["name"],
2634                "last_updated": parser.parse(rec["lastUpdated"]),
2635                "portfolios": rec["portfolios"],
2636            }
2637
2638        return output_data
2639
2640    def get_hedge_experiments(self):
2641        url = self.base_uri + "/api/graphql"
2642        qry = """
2643            query getHedgeExperiments {
2644                hedgeExperiments {
2645                    hedgeExperimentId
2646                    experimentName
2647                    userId
2648                    config
2649                    description
2650                    experimentType
2651                    lastCalculated
2652                    lastModified
2653                    status
2654                    portfolioCalcStatus
2655                    targetSecurities {
2656                        gbiId
2657                        security {
2658                            gbiId
2659                            symbol
2660                            name
2661                        }
2662                        weight
2663                    }
2664                    targetPortfolios {
2665                        portfolioId
2666                    }
2667                    baselineModel {
2668                        id
2669                        name
2670
2671                    }
2672                    baselineScenario {
2673                        hedgeExperimentScenarioId
2674                        scenarioName
2675                        description
2676                        portfolioSettingsJson
2677                        hedgeExperimentPortfolios {
2678                            portfolio {
2679                                id
2680                                name
2681                                modelId
2682                                performanceGridHeader
2683                                performanceGrid
2684                                status
2685                                tearSheet {
2686                                    groupName
2687                                    members {
2688                                        name
2689                                        value
2690                                    }
2691                                }
2692                            }
2693                        }
2694                        status
2695                    }
2696                    baselineStockUniverseId
2697                }
2698            }
2699        """
2700
2701        headers = {"Authorization": "ApiKey " + self.api_key}
2702        resp = requests.post(url, json={"query": qry}, headers=headers, **self._request_params)
2703
2704        json_resp = resp.json()
2705        # graphql endpoints typically return 200 or 400 status codes, so we must
2706        # check if we have any errors, even with a 200
2707        if (resp.ok and "errors" in json_resp) or not resp.ok:
2708            error_msg = self._try_extract_error_code(resp)
2709            logger.error(error_msg)
2710            raise BoostedAPIException(
2711                (f"Failed to get hedge experiments: {resp.status_code=}; {error_msg=}")
2712            )
2713
2714        json_experiments = json_resp["data"]["hedgeExperiments"]
2715        experiments = [HedgeExperiment.from_json_dict(exp_json) for exp_json in json_experiments]
2716        return experiments
2717
2718    def get_hedge_experiment_details(self, experiment_id: str):
2719        url = self.base_uri + "/api/graphql"
2720        qry = """
2721            query getHedgeExperimentDetails($hedgeExperimentId: ID!) {
2722                hedgeExperiment(hedgeExperimentId: $hedgeExperimentId) {
2723                ...HedgeExperimentDetailsSummaryListFragment
2724                }
2725            }
2726
2727            fragment HedgeExperimentDetailsSummaryListFragment on HedgeExperiment {
2728                hedgeExperimentId
2729                experimentName
2730                userId
2731                config
2732                description
2733                experimentType
2734                lastCalculated
2735                lastModified
2736                status
2737                portfolioCalcStatus
2738                targetSecurities {
2739                    gbiId
2740                    security {
2741                        gbiId
2742                        symbol
2743                        name
2744                    }
2745                    weight
2746                }
2747                selectedModels {
2748                    id
2749                    name
2750                    stockUniverse {
2751                        name
2752                    }
2753                }
2754                hedgeExperimentScenarios {
2755                    ...experimentScenarioFragment
2756                }
2757                selectedDummyHedgeExperimentModels {
2758                    id
2759                    name
2760                    stockUniverse {
2761                        name
2762                    }
2763                }
2764                targetPortfolios {
2765                    portfolioId
2766                }
2767                baselineModel {
2768                    id
2769                    name
2770
2771                }
2772                baselineScenario {
2773                    hedgeExperimentScenarioId
2774                    scenarioName
2775                    description
2776                    portfolioSettingsJson
2777                    hedgeExperimentPortfolios {
2778                        portfolio {
2779                            id
2780                            name
2781                            modelId
2782                            performanceGridHeader
2783                            performanceGrid
2784                            status
2785                            tearSheet {
2786                                groupName
2787                                members {
2788                                    name
2789                                    value
2790                                }
2791                            }
2792                        }
2793                    }
2794                    status
2795                }
2796                baselineStockUniverseId
2797            }
2798
2799            fragment experimentScenarioFragment on HedgeExperimentScenario {
2800                hedgeExperimentScenarioId
2801                scenarioName
2802                status
2803                description
2804                portfolioSettingsJson
2805                hedgeExperimentPortfolios {
2806                    portfolio {
2807                        id
2808                        name
2809                        modelId
2810                        performanceGridHeader
2811                        performanceGrid
2812                        status
2813                        tearSheet {
2814                            groupName
2815                            members {
2816                                name
2817                                value
2818                            }
2819                        }
2820                    }
2821                }
2822            }
2823        """
2824        headers = {"Authorization": "ApiKey " + self.api_key}
2825        resp = requests.post(
2826            url,
2827            json={"query": qry, "variables": {"hedgeExperimentId": experiment_id}},
2828            headers=headers,
2829            **self._request_params,
2830        )
2831
2832        json_resp = resp.json()
2833        # graphql endpoints typically return 200 or 400 status codes, so we must
2834        # check if we have any errors, even with a 200
2835        if (resp.ok and "errors" in json_resp) or not resp.ok:
2836            error_msg = self._try_extract_error_code(resp)
2837            logger.error(error_msg)
2838            raise BoostedAPIException(
2839                (
2840                    f"Failed to get hedge experiment results for {experiment_id=}: "
2841                    f"{resp.status_code=}; {error_msg=}"
2842                )
2843            )
2844
2845        json_exp_results = json_resp["data"]["hedgeExperiment"]
2846        if json_exp_results is None:
2847            return None  # issued a request with a non-existent experiment_id
2848        exp_results = HedgeExperimentDetails.from_json_dict(json_exp_results)
2849        return exp_results
2850
2851    def get_portfolio_performance(
2852        self,
2853        portfolio_id: str,
2854        start_date: Optional[datetime.date],
2855        end_date: Optional[datetime.date],
2856        daily_returns: bool,
2857    ) -> pd.DataFrame:
2858        """
2859        Get performance data for a portfolio.
2860
2861        Parameters
2862        ----------
2863        portfolio_id: str
2864            UUID corresponding to the portfolio in question.
2865        start_date: datetime.date
2866            Starting cutoff date to filter performance data
2867        end_date: datetime.date
2868            Ending cutoff date to filter performance data
2869        daily_returns: bool
2870            Flag indicating whether to add a new column with the daily return pct calculated
2871
2872        Returns
2873        -------
2874        pd.DataFrame object
2875            Portfolio and benchmark performance.
2876            -index:
2877                "date": pd.DatetimeIndex
2878            -columns:
2879                "benchmark": benchmark performance, % return
2880                "turnover": portfolio turnover, % of equity
2881                "portfolio": return since beginning of portfolio, % return
2882                "daily_returns": daily percent change in value of the portfolio, % return
2883                                (this column is optional and depends on the daily_returns flag)
2884        """
2885        url = f"{self.base_uri}/api/graphql"
2886        qry = """
2887            query getPortfolioPerformance($portfolioId: ID!) {
2888                portfolio(id: $portfolioId) {
2889                    id
2890                    modelId
2891                    name
2892                    status
2893                    performance {
2894                        benchmark
2895                        date
2896                        turnover
2897                        value
2898                    }
2899                }
2900            }
2901        """
2902
2903        headers = {"Authorization": "ApiKey " + self.api_key}
2904        resp = requests.post(
2905            url,
2906            json={"query": qry, "variables": {"portfolioId": portfolio_id}},
2907            headers=headers,
2908            **self._request_params,
2909        )
2910
2911        json_resp = resp.json()
2912        # the webserver returns an error for non-ready portfolios, so we have to check
2913        # for this prior to the error check below
2914        pf = (json_resp.get("data") or {}).get("portfolio")
2915        if pf is not None and pf["status"] != "READY":
2916            return pd.DataFrame()
2917
2918        # graphql endpoints typically return 200 or 400 status codes, so we must
2919        # check if we have any errors, even with a 200
2920        if (resp.ok and "errors" in json_resp) or not resp.ok:
2921            error_msg = self._try_extract_error_code(resp)
2922            logger.error(error_msg)
2923            raise BoostedAPIException(
2924                (
2925                    f"Failed to get portfolio performance for {portfolio_id=}: "
2926                    f"{resp.status_code=}; {error_msg=}"
2927                )
2928            )
2929
2930        perf = json_resp["data"]["portfolio"]["performance"]
2931        df = pd.DataFrame(perf).set_index("date").rename(columns={"value": "portfolio"})
2932        df.index = pd.to_datetime(df.index)
2933        if daily_returns:
2934            df["daily_returns"] = pd.to_numeric(df["portfolio"]).pct_change()
2935            df = df.dropna(subset=["daily_returns"])
2936        if start_date:
2937            df = df[df.index >= pd.to_datetime(start_date)]
2938        if end_date:
2939            df = df[df.index <= pd.to_datetime(end_date)]
2940        return df.astype(float)
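
        # Usage sketch (illustrative; placeholder UUID): an empty frame comes back
        # while the portfolio is still processing (status != READY).
        #
        #     perf = client.get_portfolio_performance(
        #         "00000000-0000-0000-0000-000000000000",
        #         start_date=None,
        #         end_date=None,
        #         daily_returns=True,
        #     )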
2941
2942    def _is_portfolio_still_running(self, error_msg: str) -> bool:
2943        # this is jank af. a proper fix of this is either at the webserver
2944        # returning a better response for a portfolio in draft HT2-226, OR
2945        # a bigger refactor of the API that moves to more OOP, which would allow us
2946        # to have this data all in one place
2947        return "Could not find a model with this ID" in error_msg
2948
2949    def get_portfolio_factors(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2950        url = f"{self.base_uri}/api/analysis/factors/{model_id}/{portfolio_id}"
2951        headers = {"Authorization": "ApiKey " + self.api_key}
2952        resp = requests.get(url, headers=headers, **self._request_params)
2953
2954        json_resp = resp.json()
2955        if (resp.ok and "errors" in json_resp) or not resp.ok:
2956            error_msg = json_resp["errors"][0]
2957            if self._is_portfolio_still_running(error_msg):
2958                return pd.DataFrame()
2959            logger.error(error_msg)
2960            raise BoostedAPIException(
2961                (
2962                    f"Failed to get portfolio factors for {portfolio_id=}: "
2963                    f"{resp.status_code=}; {error_msg=}"
2964                )
2965            )
2966
2967        df = pd.DataFrame(json_resp["data"], columns=json_resp["header_row"])
2968
2969        def to_lower_snake_case(s):  # why are we linting lambdas? :(
2970            return "_".join(w.lower() for w in s.split(" "))
2971
2972        df = df.rename(columns={old: to_lower_snake_case(old) for old in df.columns}).set_index(
2973            "date"
2974        )
2975        df.index = pd.to_datetime(df.index)
2976        return df
2977
2978    def get_portfolio_volatility(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2979        url = f"{self.base_uri}/api/analysis/volatility_rolling/{model_id}/{portfolio_id}"
2980        headers = {"Authorization": "ApiKey " + self.api_key}
2981        resp = requests.get(url, headers=headers, **self._request_params)
2982
2983        json_resp = resp.json()
2984        if (resp.ok and "errors" in json_resp) or not resp.ok:
2985            error_msg = json_resp["errors"][0]
2986            if self._is_portfolio_still_running(error_msg):
2987                return pd.DataFrame()
2988            logger.error(error_msg)
2989            raise BoostedAPIException(
2990                (
2991                    f"Failed to get portfolio volatility for {portfolio_id=}: "
2992                    f"{resp.status_code=}; {error_msg=}"
2993                )
2994            )
2995
2996        df = pd.DataFrame(json_resp["data"], columns=json_resp["headerRow"])
2997        df = df.rename(
2998            columns={old: old.lower().replace("avg", "avg_") for old in df.columns}  # type: ignore
2999        ).set_index("date")
3000        df.index = pd.to_datetime(df.index)
3001        return df
3002
3003    def get_portfolio_holdings(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
3004        url = f"{self.base_uri}/api/models/{model_id}/{portfolio_id}/basket-data"
3005        headers = {"Authorization": "ApiKey " + self.api_key}
3006        resp = requests.get(url, headers=headers, **self._request_params)
3007
3008        # this is a classic abuse of try/except as control flow: we try to get json body
3009        # from the response so that we can error-check. if this fails, we assume we have
3010        # a legit text response (corresponding to the csv data we care about)
3011        try:
3012            json_resp = resp.json()
3013        except json.decoder.JSONDecodeError:
3014            df = pd.read_csv(io.StringIO(resp.text), header=[0])
3015        else:
3016            error_msg = json_resp["errors"][0]
3017            if self._is_portfolio_still_running(error_msg):
3018                return pd.DataFrame()
3019            else:
3020                logger.error(error_msg)
3021                raise BoostedAPIException(
3022                    (
3023                        f"Failed to get portfolio holdings for {portfolio_id=}: "
3024                        f"{resp.status_code=}; {error_msg=}"
3025                    )
3026                )
3027
3028        df = df.rename(columns={old: old.lower() for old in df.columns}).set_index("date")
3029        df.index = pd.to_datetime(df.index)
3030        return df
3031
3032    def getStockDataTableForDate(
3033        self, model_id: str, portfolio_id: str, date: datetime.date
3034    ) -> pd.DataFrame:
3035        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3036
3037        url_base = f"{self.base_uri}/api/analysis"
3038        url_params = f"{model_id}/{portfolio_id}"
3039        formatted_date = date.strftime("%Y-%m-%d")
3040
3041        stock_prices_url = f"{url_base}/stock-prices/{url_params}/{formatted_date}"
3042        stock_factors_url = f"{url_base}/stock-factors/{url_params}/date/{formatted_date}"
3043
3044        prices_params = {"useTicker": "false", "useCurrentSignals": "true"}
        factors_params = {"useTicker": "false", "useCurrentSignals": "true"}
3046
3047        prices_resp = requests.get(
3048            stock_prices_url, headers=headers, params=prices_params, **self._request_params
3049        )
3050        factors_resp = requests.get(
            stock_factors_url, headers=headers, params=factors_params, **self._request_params
3052        )
3053
3054        frames = []
3055        gbi_ids = set()
3056        for res in (prices_resp, factors_resp):
3057            if not res.ok:
3058                error_msg = self._try_extract_error_code(res)
3059                logger.error(error_msg)
3060                raise BoostedAPIException(
3061                    (
3062                        f"Failed to fetch stock data table for model {model_id}"
3063                        f" (it's possible no data is present for the given date: {date})."
3064                        f" Error message: {error_msg}"
3065                    )
3066                )
            result = res.json()
            df = pd.DataFrame(result)
            gbi_ids.update(df.columns.to_list())
            frames.append(df)
3071
3072        all_gbiid_df = pd.concat(frames)
3073
3074        # Get the metadata of all GBI IDs
3075        gbiid_metadata_res = self._get_graphql(
3076            query=graphql_queries.GET_SEC_INFO_QRY, variables={"ids": [int(x) for x in gbi_ids]}
3077        )
3078        # Build a DF of metadata x GBI IDs
3079        gbiid_metadata_df = pd.DataFrame(
3080            {str(x["gbiId"]): x for x in gbiid_metadata_res["data"]["securities"]}
3081        )
        # Slice the metadata we care about; we'll drop "symbol" at the end.
3083        isin_country_currency_df = gbiid_metadata_df.loc[["isin", "country", "currency", "symbol"]]
3084        # Concatenate metadata to the existing stock data DF
3085        all_gbiid_with_metadata_df = pd.concat([all_gbiid_df, isin_country_currency_df])
3086        gbiid_with_symbol_df = all_gbiid_with_metadata_df.loc[
3087            :, all_gbiid_with_metadata_df.loc["symbol"].notna()
3088        ]
3089        renamed_df = gbiid_with_symbol_df.rename(
3090            index={"isin": "ISIN"}, columns=gbiid_with_symbol_df.loc["symbol"].to_dict()
3091        )
3092        output_df = renamed_df.drop(index=["symbol"])
3093        return output_df
3094
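    # Example for getStockDataTableForDate above (placeholder IDs; assumes data
    # exists for the requested date):
    #
    #     table_df = client.getStockDataTableForDate(
    #         "<model-uuid>", "<portfolio-uuid>", datetime.date(2023, 1, 3)
    #     )
    #     # columns are ticker symbols; the index mixes price/factor rows with
    #     # the appended ISIN/country/currency metadata rows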
3095    def add_hedge_experiment_scenario(
3096        self,
3097        experiment_id: str,
3098        scenario_name: str,
3099        scenario_settings: PortfolioSettings,
3100        run_scenario_immediately: bool,
3101    ) -> HedgeExperimentScenario:
3102        add_scenario_input = {
3103            "hedgeExperimentId": experiment_id,
3104            "scenarioName": scenario_name,
3105            "portfolioSettingsJson": str(scenario_settings),
3106            "runExperimentOnScenario": run_scenario_immediately,
3107            "createDefaultPortfolio": "false",
3108        }
3109        qry = """
3110            mutation addHedgeExperimentScenario(
3111                $input: AddHedgeExperimentScenarioInput!
3112            ) {
3113                addHedgeExperimentScenario(input: $input) {
3114                    hedgeExperimentScenario {
3115                        hedgeExperimentScenarioId
3116                        scenarioName
3117                        description
3118                        portfolioSettingsJson
3119                    }
3120                }
3121            }
3122
3123        """
3124
3125        url = f"{self.base_uri}/api/graphql"
3126
3127        resp = requests.post(
3128            url,
3129            headers={"Authorization": "ApiKey " + self.api_key},
3130            json={"query": qry, "variables": {"input": add_scenario_input}},
3131        )
3132
3133        json_resp = resp.json()
        if not resp.ok or "errors" in json_resp:
3135            error_msg = self._try_extract_error_code(resp)
3136            logger.error(error_msg)
3137            raise BoostedAPIException(
3138                (f"Failed to add scenario: {resp.status_code=}; {error_msg=}")
3139            )
3140
3141        scenario_dict = json_resp["data"]["addHedgeExperimentScenario"]["hedgeExperimentScenario"]
3142        if scenario_dict is None:
3143            raise BoostedAPIException(
3144                "Failed to add scenario, likely due to bad experiment id or api key"
3145            )
3146        s = HedgeExperimentScenario.from_json_dict(scenario_dict)
3147        return s
3148
3149    # experiment life cycle has 4 steps:
3150    # 1. creation - essentially a very simple registration of a new instance, returning an id
3151    # 2. modify - populate with settings
3152    # 3. start - run the experiment
3153    # 4. delete - drop the experiment
    # while I would prefer to have just 2 funcs, one for (1,2,3) and one for (4),
    # for a simpler api, we need to expose finer-grained control because of how
    # scenarios work.
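    # A sketch of that lifecycle (every value in angle brackets is a
    # placeholder, and experiment_type must be a valid hedge_experiment_type):
    #
    #     exp = client.create_hedge_experiment(
    #         "my experiment", "a description", "<experiment-type>", None
    #     )
    #     client.modify_hedge_experiment(
    #         "<experiment-uuid>", target_securities="<target-portfolio-uuid>"
    #     )
    #     client.start_hedge_experiment("<experiment-uuid>", "<scenario-uuid>")
    #     client.delete_hedge_experiment("<experiment-uuid>")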
3156    def create_hedge_experiment(
3157        self,
3158        name: str,
3159        description: str,
3160        experiment_type: hedge_experiment_type,
3161        target_securities: Union[Dict[GbiIdSecurity, float], str, None],
3162    ) -> HedgeExperiment:
        # we don't pass target_securities here (as much as I'd like to) because
        # the graphql input doesn't support it at this point
3165
        # note that this query returns a lot of null fields at this point, but
        # they are necessary for building a HedgeExperiment.
3168        create_qry = """
3169            mutation createDraftMutation($input: CreateHedgeExperimentDraftInput!) {
3170                createHedgeExperimentDraft(input: $input) {
3171                    hedgeExperiment {
3172                        hedgeExperimentId
3173                        experimentName
3174                        userId
3175                        config
3176                        description
3177                        experimentType
3178                        lastCalculated
3179                        lastModified
3180                        status
3181                        portfolioCalcStatus
3182                        targetSecurities {
3183                            gbiId
3184                            security {
3185                                gbiId
3186                                name
3187                                symbol
3188                            }
3189                            weight
3190                        }
3191                        baselineModel {
3192                            id
3193                            name
3194                        }
3195                        baselineScenario {
3196                            hedgeExperimentScenarioId
3197                            scenarioName
3198                            description
3199                            portfolioSettingsJson
3200                            hedgeExperimentPortfolios {
3201                                portfolio {
3202                                    id
3203                                    name
3204                                    modelId
3205                                    performanceGridHeader
3206                                    performanceGrid
3207                                    status
3208                                    tearSheet {
3209                                        groupName
3210                                        members {
3211                                            name
3212                                            value
3213                                        }
3214                                    }
3215                                }
3216                            }
3217                            status
3218                        }
3219                        baselineStockUniverseId
3220                    }
3221                }
3222            }
3223        """
3224
3225        create_input: Dict[str, Any] = {
3226            "name": name,
3227            "experimentType": experiment_type,
3228            "description": description,
3229        }
3230        if isinstance(target_securities, dict):
3231            create_input["setTargetSecurities"] = [
3232                {"gbiId": sec.gbi_id, "weight": weight}
3233                for (sec, weight) in target_securities.items()
3234            ]
3235        elif isinstance(target_securities, str):
3236            create_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3237        elif target_securities is None:
3238            pass
3239        else:
3240            raise TypeError(
3241                "Expected value of type Union[Dict[GbiIdSecurity, str], str] for "
3242                f"argument 'target_securities'; got {type(target_securities)}"
3243            )
3244        resp = requests.post(
3245            f"{self.base_uri}/api/graphql",
3246            json={"query": create_qry, "variables": {"input": create_input}},
3247            headers={"Authorization": "ApiKey " + self.api_key},
3248            params=self._request_params,
3249        )
3250
3251        json_resp = resp.json()
        if not resp.ok or "errors" in json_resp:
3253            error_msg = self._try_extract_error_code(resp)
3254            logger.error(error_msg)
3255            raise BoostedAPIException(
3256                (f"Failed to create hedge experiment: {resp.status_code=}; {error_msg=}")
3257            )
3258
3259        exp_dict = json_resp["data"]["createHedgeExperimentDraft"]["hedgeExperiment"]
3260        experiment = HedgeExperiment.from_json_dict(exp_dict)
3261        return experiment
3262
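    # target_securities accepts two shapes, as the branches above show (a
    # sketch; the GbiIdSecurity instance and ids are placeholders):
    #
    #     # 1) explicit securities mapped to weights
    #     client.create_hedge_experiment(
    #         "n", "d", "<experiment-type>", {some_gbi_id_security: 0.5}
    #     )
    #     # 2) an existing portfolio id as a string
    #     client.create_hedge_experiment("n", "d", "<experiment-type>", "<portfolio-uuid>")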
3263    def modify_hedge_experiment(
3264        self,
3265        experiment_id: str,
3266        name: Optional[str] = None,
3267        description: Optional[str] = None,
3268        experiment_type: Optional[hedge_experiment_type] = None,
3269        target_securities: Union[Dict[GbiIdSecurity, float], str, None] = None,
3270        model_ids: Optional[List[str]] = None,
3271        stock_universe_ids: Optional[List[str]] = None,
3272        create_default_scenario: bool = True,
3273        baseline_model_id: Optional[str] = None,
3274        baseline_stock_universe_id: Optional[str] = None,
3275        baseline_portfolio_settings: Optional[str] = None,
3276    ) -> HedgeExperiment:
3277        mod_qry = """
3278            mutation modifyHedgeExperimentDraft(
3279                $input: ModifyHedgeExperimentDraftInput!
3280            ) {
3281                modifyHedgeExperimentDraft(input: $input) {
3282                    hedgeExperiment {
3283                    ...HedgeExperimentSelectedSecuritiesPageFragment
3284                    }
3285                }
3286            }
3287
3288            fragment HedgeExperimentSelectedSecuritiesPageFragment on HedgeExperiment {
3289                hedgeExperimentId
3290                experimentName
3291                userId
3292                config
3293                description
3294                experimentType
3295                lastCalculated
3296                lastModified
3297                status
3298                portfolioCalcStatus
3299                targetSecurities {
3300                    gbiId
3301                    security {
3302                        gbiId
3303                        name
3304                        symbol
3305                    }
3306                    weight
3307                }
3308                targetPortfolios {
3309                    portfolioId
3310                }
3311                baselineModel {
3312                    id
3313                    name
3314                }
3315                baselineScenario {
3316                    hedgeExperimentScenarioId
3317                    scenarioName
3318                    description
3319                    portfolioSettingsJson
3320                    hedgeExperimentPortfolios {
3321                        portfolio {
3322                            id
3323                            name
3324                            modelId
3325                            performanceGridHeader
3326                            performanceGrid
3327                            status
3328                            tearSheet {
3329                                groupName
3330                                members {
3331                                    name
3332                                    value
3333                                }
3334                            }
3335                        }
3336                    }
3337                    status
3338                }
3339                baselineStockUniverseId
3340            }
3341        """
3342        mod_input = {
3343            "hedgeExperimentId": experiment_id,
3344            "createDefaultScenario": create_default_scenario,
3345        }
3346        if name is not None:
3347            mod_input["newExperimentName"] = name
3348        if description is not None:
3349            mod_input["newExperimentDescription"] = description
3350        if experiment_type is not None:
3351            mod_input["newExperimentType"] = experiment_type
3352        if model_ids is not None:
3353            mod_input["setSelectdModels"] = model_ids
3354        if stock_universe_ids is not None:
3355            mod_input["selectedStockUniverseIds"] = stock_universe_ids
3356        if baseline_model_id is not None:
3357            mod_input["setBaselineModel"] = baseline_model_id
3358        if baseline_stock_universe_id is not None:
3359            mod_input["setBaselineStockUniverse"] = baseline_stock_universe_id
3360        if baseline_portfolio_settings is not None:
3361            mod_input["setBaselinePortfolioSettings"] = baseline_portfolio_settings
        # note that the behaviors bound to these fields are mutually exclusive,
        # and it's possible the opposite one was set earlier in the DRAFT phase
        # of experiment creation, so when setting one, we must unset the other
3365        if isinstance(target_securities, dict):
3366            mod_input["setTargetSecurities"] = [
3367                {"gbiId": sec.gbi_id, "weight": weight}
3368                for (sec, weight) in target_securities.items()
3369            ]
3370            mod_input["setTargetPortfolios"] = None
3371        elif isinstance(target_securities, str):
3372            mod_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3373            mod_input["setTargetSecurities"] = None
3374        elif target_securities is None:
3375            pass
3376        else:
3377            raise TypeError(
3378                "Expected value of type Union[Dict[GbiIdSecurity, str], str] "
3379                f"for argument 'target_securities'; got {type(target_securities)}"
3380            )
3381
3382        resp = requests.post(
3383            f"{self.base_uri}/api/graphql",
3384            json={"query": mod_qry, "variables": {"input": mod_input}},
3385            headers={"Authorization": "ApiKey " + self.api_key},
3386            params=self._request_params,
3387        )
3388
3389        json_resp = resp.json()
        if not resp.ok or "errors" in json_resp:
3391            error_msg = self._try_extract_error_code(resp)
3392            logger.error(error_msg)
3393            raise BoostedAPIException(
3394                (
3395                    f"Failed to modify hedge experiment in preparation for start {experiment_id=}: "
3396                    f"{resp.status_code=}; {error_msg=}"
3397                )
3398            )
3399
3400        exp_dict = json_resp["data"]["modifyHedgeExperimentDraft"]["hedgeExperiment"]
3401        experiment = HedgeExperiment.from_json_dict(exp_dict)
3402        return experiment
3403
3404    def start_hedge_experiment(self, experiment_id: str, *scenario_ids: str) -> HedgeExperiment:
3405        start_qry = """
3406            mutation startHedgeExperiment($input: StartHedgeExperimentInput!) {
3407                startHedgeExperiment(input: $input) {
3408                    hedgeExperiment {
3409                        hedgeExperimentId
3410                        experimentName
3411                        userId
3412                        config
3413                        description
3414                        experimentType
3415                        lastCalculated
3416                        lastModified
3417                        status
3418                        portfolioCalcStatus
3419                        targetSecurities {
3420                            gbiId
3421                            security {
3422                                gbiId
3423                                name
3424                                symbol
3425                            }
3426                            weight
3427                        }
3428                        targetPortfolios {
3429                            portfolioId
3430                        }
3431                        baselineModel {
3432                            id
3433                            name
3434                        }
3435                        baselineScenario {
3436                            hedgeExperimentScenarioId
3437                            scenarioName
3438                            description
3439                            portfolioSettingsJson
3440                            hedgeExperimentPortfolios {
3441                                portfolio {
3442                                    id
3443                                    name
3444                                    modelId
3445                                    performanceGridHeader
3446                                    performanceGrid
3447                                    status
3448                                    tearSheet {
3449                                        groupName
3450                                        members {
3451                                            name
3452                                            value
3453                                        }
3454                                    }
3455                                }
3456                            }
3457                            status
3458                        }
3459                        baselineStockUniverseId
3460                    }
3461                }
3462            }
3463        """
3464        start_input: Dict[str, Any] = {"hedgeExperimentId": experiment_id}
        if scenario_ids:
3466            start_input["hedgeExperimentScenarioIds"] = list(scenario_ids)
3467
3468        resp = requests.post(
3469            f"{self.base_uri}/api/graphql",
3470            json={"query": start_qry, "variables": {"input": start_input}},
3471            headers={"Authorization": "ApiKey " + self.api_key},
3472            params=self._request_params,
3473        )
3474
3475        json_resp = resp.json()
        if not resp.ok or "errors" in json_resp:
3477            error_msg = self._try_extract_error_code(resp)
3478            logger.error(error_msg)
3479            raise BoostedAPIException(
3480                (
3481                    f"Failed to start hedge experiment {experiment_id=}: "
3482                    f"{resp.status_code=}; {error_msg=}"
3483                )
3484            )
3485
3486        exp_dict = json_resp["data"]["startHedgeExperiment"]["hedgeExperiment"]
3487        experiment = HedgeExperiment.from_json_dict(exp_dict)
3488        return experiment
3489
3490    def delete_hedge_experiment(self, experiment_id: str) -> bool:
3491        delete_qry = """
3492            mutation($input: DeleteHedgeExperimentsInput!) {
3493                deleteHedgeExperiments(input: $input) {
3494                    success
3495                }
3496            }
3497        """
3498        delete_input = {"hedgeExperimentIds": [experiment_id]}
3499        resp = requests.post(
3500            f"{self.base_uri}/api/graphql",
3501            json={"query": delete_qry, "variables": {"input": delete_input}},
3502            headers={"Authorization": "ApiKey " + self.api_key},
3503            params=self._request_params,
3504        )
3505
3506        json_resp = resp.json()
        if not resp.ok or "errors" in json_resp:
3508            error_msg = self._try_extract_error_code(resp)
3509            logger.error(error_msg)
3510            raise BoostedAPIException(
                (
                    f"Failed to delete hedge experiment {experiment_id=}: "
                    f"{resp.status_code=}; {error_msg=}"
                )
3515            )
3516
3517        return json_resp["data"]["deleteHedgeExperiments"]["success"]
3518
3519    def create_hedge_basket_position_bounds_from_csv(
3520        self,
3521        filepath: str,
3522        name: str,
3523        description: Optional[str],
3524        mapping_result_filepath: Optional[str],
3525    ) -> str:
3526        DATE = "Date"
3527        ISIN = "ISIN"
3528        COUNTRY = "Country"
3529        CURRENCY = "Currency"
3530        LOWER_BOUND = "Lower Bound"
3531        UPPER_BOUND = "Upper Bound"
3532        supported_columns = {
3533            DATE,
3534            ISIN,
3535            COUNTRY,
3536            CURRENCY,
3537            LOWER_BOUND,
3538            UPPER_BOUND,
3539        }
3540        required_columns = {ISIN, LOWER_BOUND, UPPER_BOUND}
3541
3542        try:
3543            df: pd.DataFrame = pd.read_csv(filepath, parse_dates=True)
3544        except Exception as e:
3545            raise BoostedAPIException(f"Error reading {filepath=}: {e}")
3546
3547        columns = set(df.columns)
3548
3549        # First perform basic data validation
3550        missing_required_columns = required_columns - columns
3551        if missing_required_columns:
3552            raise BoostedAPIException(
3553                f"The following required columns are missing: {missing_required_columns}"
3554            )
3555        extra_columns = columns - supported_columns
3556        if extra_columns:
3557            logger.warning(
3558                f"The following columns are unsupported and will be ignored: {extra_columns}"
3559            )
3560        try:
3561            df[LOWER_BOUND] = df[LOWER_BOUND].astype(float)
3562            df[UPPER_BOUND] = df[UPPER_BOUND].astype(float)
3563            df[ISIN] = df[ISIN].astype(str)
3564        except Exception as e:
3565            raise BoostedAPIException(f"Column datatypes are incorrect: {e}")
3566        lb_gt_ub = df[df[LOWER_BOUND] > df[UPPER_BOUND]]
3567        if not lb_gt_ub.empty:
3568            raise BoostedAPIException(
3569                f"Lower Bound must be <= Upper Bound, but these are not: {lb_gt_ub[ISIN].tolist()}"
3570            )
3571        out_of_range = df[
3572            (
3573                (df[LOWER_BOUND] < 0)
3574                | (df[LOWER_BOUND] > 1)
3575                | (df[UPPER_BOUND] < 0)
3576                | (df[UPPER_BOUND] > 1)
3577            )
3578        ]
3579        if not out_of_range.empty:
3580            raise BoostedAPIException("Lower Bound and Upper Bound values must be in range [0, 1]")
3581
3582        # Now map the security info into GBI IDs
3583        rows = list(df.to_dict(orient="index").values())
3584        sec_data_list = self.getGbiIdFromIdentCountryCurrencyDate(
3585            ident_country_currency_dates=[
3586                DateIdentCountryCurrency(
3587                    date=row.get(DATE, datetime.date.today().isoformat()),
3588                    identifier=row.get(ISIN),
3589                    id_type=ColumnSubRole.ISIN,
3590                    country=row.get(COUNTRY),
3591                    currency=row.get(CURRENCY),
3592                )
3593                for row in rows
3594            ]
3595        )
3596
3597        # Now take each row and its gbi id mapping, and create the bounds list
3598        bounds = []
3599        for row, sec_data in zip(rows, sec_data_list):
3600            if sec_data is None:
3601                logger.warning(f"Failed to map {row[ISIN]}, skipping this security.")
3602            else:
3603                bounds.append(
3604                    {"gbi_id": str(sec_data.gbi_id), "lb": row[LOWER_BOUND], "ub": row[UPPER_BOUND]}
3605                )
3606
3607                # Add security metadata to see the mapping
3608                row["Mapped GBI ID"] = sec_data.gbi_id
3609                row[f"Mapped {ISIN}"] = sec_data.isin_info.identifier
3610                row[f"Mapped {COUNTRY}"] = sec_data.isin_info.country
3611                row[f"Mapped {CURRENCY}"] = sec_data.isin_info.currency
3612                row["Mapped Ticker"] = sec_data.ticker
3613                row["Mapped Company Name"] = sec_data.company_name
3614
3615        # Call endpoint to create the bounds settings template
3616        qry = """
3617              mutation CreatePartialStrategyTemplate(
3618                $portfolioSettingsKey: String!
3619                $partialSettings: String!
3620                $name: String!
3621                $description: String
3622              ) {
3623                createPartialStrategyTemplate(
3624                  portfolioSettingsKey: $portfolioSettingsKey
3625                  partialSettings: $partialSettings
3626                  name: $name
3627                  description: $description
3628                )
3629              }
3630            """
3631        variables = {
3632            "portfolioSettingsKey": "basketTrading.positionSizeBounds",
3633            "partialSettings": json.dumps(bounds),
3634            "name": name,
3635            "description": description,
3636        }
3637        resp = self._get_graphql(qry, variables=variables)
3638
3639        # Write mapped csv for reference
3640        if mapping_result_filepath is not None:
3641            pd.DataFrame(rows).to_csv(mapping_result_filepath)
3642
3643        return resp["data"]["createPartialStrategyTemplate"]
3644
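    # For create_hedge_basket_position_bounds_from_csv above, the expected CSV
    # shape for reference (ISIN / Lower Bound / Upper Bound are required; Date,
    # Country, and Currency are optional; bounds are weights in [0, 1]; the
    # ISINs below are only illustrative):
    #
    #     Date,ISIN,Country,Currency,Lower Bound,Upper Bound
    #     2023-01-03,US0378331005,USA,USD,0.01,0.05
    #     2023-01-03,US5949181045,USA,USD,0.00,0.10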
3645    def get_hit_rate_file(self, model_id: str, portfolio_id: str, file_key: str) -> dict:
3646        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate-file/"
3647        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3648        req_json = {"model_id": model_id, "portfolio_id": portfolio_id, "file_key": file_key}
3649        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3650        if not res.ok:
3651            error_msg = self._try_extract_error_code(res)
3652            logger.error(error_msg)
3653            raise BoostedAPIException(f"Failed to get Hit Rate file: {error_msg}")
3654
3655        data = res.json()
3656        return data
3657
3658    def get_hit_rate_with_securities(
3659        self,
3660        model_id: str,
3661        portfolio_id: str,
3662        meet_all_conditions: bool,
3663        securities: List[str],
3664        countries: List[str],
3665        sectors: List[str],
3666        start_date: Optional[BoostedDate],
3667        end_date: Optional[BoostedDate],
3668    ) -> dict:
3669
3670        start_date, end_date = get_date_range(start_date=start_date, end_date=end_date)
3671        start_date, end_date = start_date.isoformat(), end_date.isoformat()
3672
3673        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"  # noqa f"http://0.0.0.0:8000{DAL_PA_ROUTE}/get-securities-hit-rate/"
3674        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3675        req_json = {
3676            "model_id": model_id,
3677            "portfolio_id": portfolio_id,
3678            "meet_all_conditions": meet_all_conditions,
3679            "securities": securities,
3680            "countries": countries,
3681            "sectors": sectors,
3682            "start_date": start_date,
3683            "end_date": end_date,
3684        }
3685        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3686
3687        if not res.ok:
3688            error_msg = self._try_extract_error_code(res)
3689            logger.error(error_msg)
3690            raise BoostedAPIException(f"Failed to get Hit Rate with securities: {error_msg}")
3691
3692        data = res.json()
3693        return data
3694
3695    def get_portfolio_accuracy(
3696        self,
3697        model_id: str,
3698        portfolio_id: str,
3699        start_date: Optional[BoostedDate] = None,
3700        end_date: Optional[BoostedDate] = None,
3701    ) -> dict:
3702        if start_date and end_date:
3703            validate_start_and_end_dates(start_date=start_date, end_date=end_date)
3704            start_date = convert_date(start_date)
3705            end_date = convert_date(end_date)
3706
3707        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
3708        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"
3709        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3710        req_json = {"model_id": model_id, "portfolio_id": portfolio_id}
3711        if start_date and end_date:
3712            req_json["start_date"] = start_date.isoformat()
3713            req_json["end_date"] = end_date.isoformat()
3714        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3715
3716        if not res.ok:
3717            error_msg = self._try_extract_error_code(res)
3718            logger.error(error_msg)
3719            raise BoostedAPIException(f"Failed to get Hit Rate: {error_msg}")
3720
3721        data = res.json()
3722        return data
3723
3724    def create_watchlist(self, name: str) -> str:
3725        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create/"
3726        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3727        req_json = {"name": name}
3728        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3729
3730        if not res.ok:
3731            error_msg = self._try_extract_error_code(res)
3732            logger.error(error_msg)
3733            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
3734
3735        data = res.json()
3736        return data["watchlist_id"]
3737
3738    def _get_graphql(
3739        self,
3740        query: str,
3741        variables: Dict,
3742        error_msg_prefix: str = "Failed to get graphql result: ",
3743        log_error: bool = True,
3744    ) -> Dict:
3745        headers = {"Authorization": "ApiKey " + self.api_key}
3746        json_req = {"query": query, "variables": variables}
3747
3748        url = self.base_uri + "/api/graphql"
3749        resp = requests.post(
3750            url,
3751            json=json_req,
3752            headers=headers,
3753            params=self._request_params,
3754        )
3755
        # graphql endpoints typically return 200 or 400 status codes, so we must
        # check the body for errors even on a 200
        if not resp.ok or "errors" in resp.json():
3759            error_msg = self._try_extract_error_code(resp)
3760            error_str = str(error_msg_prefix) + f" {resp.status_code=}; {error_msg=}"
3761            if log_error:
3762                logger.error(error_str)
3763            raise BoostedAPIException(error_str)
3764
3765        json_resp = resp.json()
3766        return json_resp
3767
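    # Internal-usage sketch for _get_graphql above (the query and variables are
    # illustrative, not a real schema):
    #
    #     resp = self._get_graphql(
    #         query="query q($ids: [Int!]) { securities(ids: $ids) { gbiId } }",
    #         variables={"ids": [1, 2, 3]},
    #         error_msg_prefix="Failed to fetch securities:",
    #     )
    #     # resp is the parsed JSON body, e.g. {"data": {"securities": [...]}}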
3768    def _get_security_info(self, gbi_ids: List[int]) -> Dict:
3769        query = graphql_queries.GET_SEC_INFO_QRY
3770        variables = {
3771            "ids": [] if not gbi_ids else gbi_ids,
3772        }
3773
3774        error_msg_prefix = "Failed to get Security Details:"
3775        return self._get_graphql(
3776            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3777        )
3778
3779    def _get_sector_info(self) -> Dict:
3780        """
3781        Returns a list of sector objects, e.g.
3782        {
3783            "id": 1010,
3784            "parentId": 10,
3785            "name": "Energy",
3786            "topParentName": null,
3787            "spiqSectorId": -1,
3788            "legacy": false
3789        }
3790        """
3791        url = f"{self.base_uri}/api/sectors"
3792        headers = {"Authorization": "ApiKey " + self.api_key}
3793        res = requests.get(url, headers=headers, **self._request_params)
3794        self._check_ok_or_err_with_msg(res, "Failed to get sectors data")
3795        return res.json()["sectors"]
3796
    def _get_watchlist_analysis(
        self,
        gbi_ids: List[int],
        model_ids: List[str],
        portfolio_ids: List[str],
        asof_date: Optional[datetime.date] = None,
    ) -> Dict:
        # resolve the default at call time, not once at import time
        asof_date = asof_date or datetime.date.today()
        query = graphql_queries.WATCHLIST_ANALYSIS_QRY
3805        variables = {
3806            "gbiIds": gbi_ids,
3807            "modelIds": model_ids,
3808            "portfolioIds": portfolio_ids,
3809            "date": self.__iso_format(asof_date),
3810        }
3811        error_msg_prefix = "Failed to get Coverage Analysis:"
3812        return self._get_graphql(
3813            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3814        )
3815
3816    def _get_models_for_portfolio(self, portfolio_ids: List[str]) -> Dict:
3817        query = graphql_queries.GET_MODELS_FOR_PORTFOLIOS_QRY
3818        variables = {"ids": portfolio_ids}
3819        error_msg_prefix = "Failed to get Models for Portfolios: "
3820        return self._get_graphql(
3821            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3822        )
3823
    def _get_excess_return(
        self, model_ids: List[str], gbi_ids: List[int], asof_date: Optional[datetime.date] = None
    ) -> Dict:
        # resolve the default at call time, not once at import time
        asof_date = asof_date or datetime.date.today()
        query = graphql_queries.GET_EXCESS_RETURN_QRY
3828
3829        variables = {
3830            "modelIds": model_ids,
3831            "gbiIds": gbi_ids,
3832            "date": self.__iso_format(asof_date),
3833        }
3834        error_msg_prefix = "Failed to get Excess Return Slugging Pct: "
3835        return self._get_graphql(
3836            query=query, variables=variables, error_msg_prefix=error_msg_prefix
3837        )
3838
    def _coverage_column_name_format(self, in_str: str) -> str:
3840        if in_str.upper() == "ISIN":
3841            return "ISIN"
3842
3843        return in_str.title()
3844
3845    def _get_model_stocks(self, model_id: str) -> List[GbiIdTickerISIN]:
3846        # first, get the universe id
3847        resp = self._get_graphql(
3848            graphql_queries.GET_MODEL_STOCK_UNIVERSE_ID_QUERY,
3849            variables={"modelId": model_id},
3850            error_msg_prefix="Failed to get model stock universe ID",
3851        )
3852        universe_id = resp["data"]["model"]["stockUniverseId"]
3853
3854        # now, query for universe stocks
3855        url = self.base_uri + f"/api/stocks/model-universe/{universe_id}"
3856        headers = {"Authorization": "ApiKey " + self.api_key}
        universe_resp = requests.get(url, headers=headers, **self._request_params)
        self._check_ok_or_err_with_msg(universe_resp, "Failed to get model universe stocks")
        universe = universe_resp.json()["stockUniverse"]
3859        securities = [
3860            GbiIdTickerISIN(gbi_id=security["id"], ticker=security["symbol"], isin=security["isin"])
3861            for security in universe
3862        ]
3863        return securities
3864
3865    def get_coverage_info(self, watchlist_id: str, portfolio_group_id: str) -> pd.DataFrame:
3866        # get securities list in watchlist
3867        watchlist_details = self.get_watchlist_details(watchlist_id)
3868        security_list = watchlist_details["targets"]
3869
3870        gbi_ids = [x["gbi_id"] for x in security_list]
3871
3872        gbi_data: Dict[Any, Dict] = {x: {} for x in gbi_ids}
3873
3874        # get security info ticker, name, industry etc
3875        sec_info = self._get_security_info(gbi_ids)
3876
3877        for sec in sec_info["data"]["securities"]:
3878            gbi_id = sec["gbiId"]
3879            for k in ["symbol", "name", "isin", "country", "currency"]:
3880                gbi_data[gbi_id][self._coverage_column_name_format(k)] = sec[k]
3881
3882            gbi_data[gbi_id][self._coverage_column_name_format("Sector")] = sec["sector"][
3883                "topParentName"
3884            ]
3885
        # get the list of portfolios in the portfolio group
3887        portfolio_group = self.get_portfolio_group(portfolio_group_id)
3888        portfolio_ids = [x["portfolio_id"] for x in portfolio_group["portfolios"]]
3889        portfolio_info = {x["portfolio_id"]: x for x in portfolio_group["portfolios"]}
3890
3891        model_resp = self._get_models_for_portfolio(portfolio_ids=portfolio_ids)
3892        for portfolio in model_resp["data"]["portfolios"]:
3893            portfolio_info[portfolio["id"]].update(portfolio)
3894
3895        model_info = {
3896            x["modelId"]: portfolio_info[x["id"]] for x in model_resp["data"]["portfolios"]
3897        }
3898
3899        # model_ids and portfolio_ids are parallel arrays
3900        model_ids = [portfolio_info[x]["modelId"] for x in portfolio_ids]
3901
3902        # graphql: get watchlist analysis
3903        wl_analysis = self._get_watchlist_analysis(
3904            gbi_ids=gbi_ids,
3905            model_ids=model_ids,
3906            portfolio_ids=portfolio_ids,
3907            asof_date=datetime.date.today(),
3908        )
3909
3910        portfolio_gbi_data: Dict[Any, Dict] = {k: {} for k in portfolio_ids}
        for v in portfolio_gbi_data.values():
3912            v.update({k: {} for k in gbi_data.keys()})
3913
3914        equity_explorer_date = wl_analysis["data"]["watchlistAnalysis"][0]["analysisDates"][0][
3915            "date"
3916        ]
3917        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3918            gbi_id = wla["gbiId"]
3919            gbi_data[gbi_id]["Composite Rating"] = wla["analysisDates"][0]["aggregateSignal"][
3920                "rating"
3921            ]
3922            gbi_data[gbi_id]["Composite Rating Delta"] = wla["analysisDates"][0]["aggregateSignal"][
3923                "ratingDelta"
3924            ]
3925
3926            for p in wla["analysisDates"][0]["portfoliosSignals"]:
3927                model_name = portfolio_info[p["portfolioId"]]["modelName"]
3928
3929                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3930                    model_name + self._coverage_column_name_format(": rank")
3931                ] = (p["rank"] + 1)
3932                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3933                    model_name + self._coverage_column_name_format(": rank delta")
3934                ] = (-1 * p["signalDelta"])
3935                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3936                    model_name + self._coverage_column_name_format(": rating")
3937                ] = p["rating"]
3938                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3939                    model_name + self._coverage_column_name_format(": rating delta")
3940                ] = p["ratingDelta"]
3941
3942        neg_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3943        pos_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3944        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3945            gbi_id = wla["gbiId"]
3946
3947            for pid, signals in zip(portfolio_ids, wla["analysisDates"][0]["portfoliosSignals"]):
3948                model_name = portfolio_info[pid]["modelName"]
3949                neg_rec[gbi_id][
3950                    model_name + self._coverage_column_name_format(": negative recommendation")
3951                ] = signals["explainWeightNeg"]
3952                pos_rec[gbi_id][
3953                    model_name + self._coverage_column_name_format(": positive recommendation")
3954                ] = signals["explainWeightPos"]
3955
3956        # graphql: GetExcessReturn - slugging pct
3957        er_sp = self._get_excess_return(
3958            model_ids=model_ids, gbi_ids=gbi_ids, asof_date=equity_explorer_date
3959        )
3960
3961        for model in er_sp["data"]["models"]:
3962            model_name = model_info[model["id"]]["modelName"]
3963            for stat in model["equityExplorerData"]["equityExplorerSummaryStatistics"]:
                portfolio_id = model_info[model["id"]]["id"]
                portfolio_gbi_data[portfolio_id][int(stat["gbiId"])][
                    model_name + self._coverage_column_name_format(": slugging %")
                ] = (stat["ER"]["SP"]["sixMonthWindowOneMonthHorizon"] * 100)
3968
3969        # add rank, rating, slugging
3970        for pid, v in portfolio_gbi_data.items():
3971            for gbi_id, vv in v.items():
3972                gbi_data[gbi_id].update(vv)
3973
3974        # add neg/pos rec scores
3975        for rec in [neg_rec, pos_rec]:
3976            for k, v in rec.items():
3977                gbi_data[k].update(v)
3978
        df = pd.DataFrame.from_records(list(gbi_data.values()))
3980
3981        return df
3982
3983    def get_coverage_csv(
3984        self, watchlist_id: str, portfolio_group_id: str, filepath: Optional[str] = None
3985    ) -> Optional[str]:
3986        """
3987        Converts the coverage contents to CSV format
3988        Parameters
3989        ----------
3990        watchlist_id: str
3991            UUID str identifying the coverage watchlist
3992        portfolio_group_id: str
3993            UUID str identifying the group of portfolio to use for analysis
3994        filepath: Optional[str]
3995            UUID str identifying the group of portfolio to use for analysis
3996
3997        Returns:
3998        ----------
3999        None if filepath is provided, else a string with a csv's contents is returned
4000        """
4001
4002        df = self.get_coverage_info(watchlist_id, portfolio_group_id)
4003
4004        return df.to_csv(filepath, index=False, float_format="%.4f")
4005
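    # Example for get_coverage_csv above (placeholder IDs):
    #
    #     # write the coverage table to disk
    #     client.get_coverage_csv("<watchlist-uuid>", "<group-uuid>", "coverage.csv")
    #     # or get the CSV contents back as a string
    #     csv_str = client.get_coverage_csv("<watchlist-uuid>", "<group-uuid>")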
4006    def get_watchlist_details(self, watchlist_id: str) -> Dict:
4007        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/details/"
4008        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4009        req_json = {"watchlist_id": watchlist_id}
4010        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4011
4012        if not res.ok:
4013            error_msg = self._try_extract_error_code(res)
4014            logger.error(error_msg)
4015            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
4016
4017        data = res.json()
4018        return data
4019
4020    def create_watchlist_from_file(self, name: str, filepath: str) -> str:
4021        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create_watchlist_from_file/"
4022        headers = {"Authorization": "ApiKey " + self.api_key}
4023
4024        with open(filepath, "rb") as fp:
4025            file_bytes = fp.read()
4026
4027        file_bytes_base64 = base64.b64encode(file_bytes).decode("ascii")
4028        json_req = {
4029            "content_type": mimetypes.guess_type(filepath)[0],
4030            "file_bytes_base64": file_bytes_base64,
4031            "name": name,
4032        }
4033
4034        res = requests.post(url, json=json_req, headers=headers)
4035
4036        if not res.ok:
4037            error_msg = self._try_extract_error_code(res)
4038            logger.error(error_msg)
4039            raise BoostedAPIException(f"Failed to create watchlist from file: {error_msg}")
4040
4041        data = res.json()
4042        return data["watchlist_id"]
4043
4044    def get_watchlists(self) -> List[Dict]:
4045        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/get_user_watchlists/"
4046        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4047        req_json: Dict = {}
4048        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4049
4050        if not res.ok:
4051            error_msg = self._try_extract_error_code(res)
4052            logger.error(error_msg)
4053            raise BoostedAPIException(f"Failed to get user watchlists: {error_msg}")
4054
4055        data = res.json()
4056        return data["watchlists"]
4057
    def get_watchlist_contents(self, watchlist_id: str) -> Dict:
4059        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/contents/"
4060        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4061        req_json = {"watchlist_id": watchlist_id}
4062        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4063
4064        if not res.ok:
4065            error_msg = self._try_extract_error_code(res)
4066            logger.error(error_msg)
4067            raise BoostedAPIException(f"Failed to get watchlist contents: {error_msg}")
4068
4069        data = res.json()
4070        return data
4071
    def get_watchlist_contents_as_csv(self, watchlist_id: str, filepath: str) -> None:
4073        data = self.get_watchlist_contents(watchlist_id)
4074        df = pd.DataFrame(data["contents"])
4075        df.to_csv(filepath, index=False)
4076
4077    # TODO this will need to be enhanced to accept country/currency overrides
4078    def add_securities_to_watchlist(
4079        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4080    ) -> Dict:
4081        # should we just make the arg lower? all caps has a flag-like feel to it
4082        id_type = identifier_type.lower()
4083        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/add_{id_type}s/"
4084        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4085        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4086        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4087
4088        if not res.ok:
4089            error_msg = self._try_extract_error_code(res)
4090            logger.error(error_msg)
4091            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
4092
4093        data = res.json()
4094        return data
4095
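    # A minimal watchlist workflow sketch (placeholder identifiers) tying the
    # methods above together:
    #
    #     wl_id = client.create_watchlist("my watchlist")
    #     client.add_securities_to_watchlist(wl_id, ["AAPL", "MSFT"], "TICKER")
    #     client.add_securities_to_watchlist(wl_id, ["US0378331005"], "ISIN")
    #     contents = client.get_watchlist_contents(wl_id)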
4096    def remove_securities_from_watchlist(
4097        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4098    ) -> Dict:
4099        # should we just make the arg lower? all caps has a flag-like feel to it
4100        id_type = identifier_type.lower()
4101        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/remove_{id_type}s/"
4102        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4103        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4104        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4105
4106        if not res.ok:
4107            error_msg = self._try_extract_error_code(res)
4108            logger.error(error_msg)
4109            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
4110
4111        data = res.json()
4112        return data
4113
4114    def get_portfolio_groups(
4115        self,
4116    ) -> Dict:
4117        """
4118        Parameters: None
4119
4120
4121        Returns:
4122        ----------
4123
4124        Dict:  {
4125        user_id: str
4126        portfolio_groups: List[PortfolioGroup]
4127        }
4128        where PortfolioGroup is defined as = Dict {
4129        group_id: str
4130        group_name: str
4131        portfolios: List[PortfolioInGroup]
4132        }
4133        where PortfolioInGroup is defined as = Dict {
4134        portfolio_id: str
4135        rank_in_group: Optional[int]
4136        }
4137        """
4138        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get"
4139        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4140        req_json: Dict = {}
4141        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4142
4143        if not res.ok:
4144            error_msg = self._try_extract_error_code(res)
4145            logger.error(error_msg)
4146            raise BoostedAPIException(f"Failed to get user portfolio groups: {error_msg}")
4147
4148        data = res.json()
4149        return data
4150
4151    def get_portfolio_group(self, portfolio_group_id: str) -> Dict:
4152        """
4153        Parameters:
4154        portfolio_group_id: str
4155           UUID identifier for the portfolio group
4156
4157
4158        Returns:
4159        ----------
4160
4161        PortfolioGroup: Dict:  {
4162        group_id: str
4163        group_name: str
4164        portfolios: List[PortfolioInGroup]
4165        }
4166        where PortfolioInGroup is defined as = Dict {
4167        portfolio_id: str
4168        portfolio_name: str
4169        rank_in_group: Optional[int]
4170        }
4171        """
4172        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-one"
4173        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4174        req_json = {"portfolio_group_id": portfolio_group_id}
4175        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4176
4177        if not res.ok:
4178            error_msg = self._try_extract_error_code(res)
4179            logger.error(error_msg)
4180            raise BoostedAPIException(f"Failed to get user portfolio groups: {error_msg}")
4181
4182        data = res.json()
4183        return data
4184
4185    def set_sticky_portfolio_group(
4186        self,
4187        portfolio_group_id: str,
4188    ) -> Dict:
4189        """
4190        Set sticky portfolio group
4191
4192        Parameters
4193        ----------
4194
        portfolio_group_id: str,
           UUID str identifying a portfolio group
4197
4198        Returns:
4199        -------
4200        Dict {
4201            changed: int - 1 == success
4202        }
4203        """
4204        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/set-sticky"
4205        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4206        req_json = {"portfolio_group_id": portfolio_group_id}
4207        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4208
4209        if not res.ok:
4210            error_msg = self._try_extract_error_code(res)
4211            logger.error(error_msg)
4212            raise BoostedAPIException(f"Failed to set sticky portfolio group: {error_msg}")
4213
4214        data = res.json()
4215        return data
4216
4217    def get_sticky_portfolio_group(
4218        self,
4219    ) -> Dict:
4220        """
4221        Get sticky portfolio group for the user
4222
4223        Parameters
4224        ----------
4225
4226        Returns:
4227        -------
4228        Dict {
4229            group_id: str
4230            group_name: str
4231            portfolios: List[PortfolioInGroup(Dict)]
4232                  PortfolioInGroup(Dict):
4233                           portfolio_id: str
4234                           rank_in_group: Optional[int] = None
4235                           portfolio_name: Optional[str] = None
4236        }
4237        """
4238        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-sticky"
4239        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4240        req_json: Dict = {}
4241        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4242
4243        if not res.ok:
4244            error_msg = self._try_extract_error_code(res)
4245            logger.error(error_msg)
4246            raise BoostedAPIException(f"Failed to get sticky portfolio group: {error_msg}")
4247
4248        data = res.json()
4249        return data
4250
4251    def create_portfolio_group(
4252        self,
4253        group_name: str,
4254        portfolios: Optional[List[Dict]] = None,
4255    ) -> Dict:
4256        """
4257        Create a new portfolio group
4258
4259        Parameters
4260        ----------
4261
4262        group_name: str
4263           name of the new group
4264
        portfolios: List of Dict [:
            portfolio_id: str
            rank_in_group: Optional[int] = None
        ]
4270
4271        Returns:
4272        ----------
4273
4274        Dict: {
4275        group_id: str
4276           UUID identifier for the portfolio group
4277
4278        created: int
4279           num groups created, 1 == success
4280
4281        added: int
4282           num portfolios added to the group, should match the length of 'portfolios' argument
4283        }
4284        """
4285        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/create"
4286        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4287        req_json = {"group_name": group_name, "portfolios": portfolios}
4288
4289        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4290
4291        if not res.ok:
4292            error_msg = self._try_extract_error_code(res)
4293            logger.error(error_msg)
4294            raise BoostedAPIException(f"Failed to create portfolio group: {error_msg}")
4295
4296        data = res.json()
4297        return data
4298
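    # Example for create_portfolio_group above (placeholder portfolio ids):
    #
    #     group = client.create_portfolio_group(
    #         "my group",
    #         portfolios=[
    #             {"portfolio_id": "<portfolio-uuid>", "rank_in_group": 1},
    #             {"portfolio_id": "<portfolio-uuid-2>"},
    #         ],
    #     )
    #     # group["group_id"] identifies the new group; group["added"] should
    #     # equal the number of portfolios passed in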
4299    def rename_portfolio_group(
4300        self,
4301        group_id: str,
4302        group_name: str,
4303    ) -> Dict:
4304        """
4305        Rename a portfolio group
4306
4307        Parameters
4308        ----------
4309
4310        group_id: str,
4311           UUID str identifying a portfolio group
4312
4313        group_name: str,
           The new name for the portfolio group
4315
4316        Returns:
4317        -------
4318        Dict {
4319            changed: int - 1 == success
4320        }
4321        """
4322        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/rename"
4323        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4324        req_json = {"group_id": group_id, "group_name": group_name}
4325        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4326
4327        if not res.ok:
4328            error_msg = self._try_extract_error_code(res)
4329            logger.error(error_msg)
4330            raise BoostedAPIException(f"Failed to rename portfolio group: {error_msg}")
4331
4332        data = res.json()
4333        return data
4334
4335    def add_to_portfolio_group(
4336        self,
4337        group_id: str,
4338        portfolios: List[Dict],
4339    ) -> Dict:
4340        """
4341        Add portfolios to a group
4342
4343        Parameters
4344        ----------
4345
4346        group_id: str,
4347           UUID str identifying a portfolio group
4348
4349        portfolios: List of Dict [:
4350            portfolio_id: str
4351            rank_in_group: Optional[int] = None
4352        ]
4353
4354
4355        Returns:
4356        -------
4357        Dict {
4358            added: int
4359               number of successful changes
4360        }
4361        """
4362        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/add-to-group"
4363        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4364        req_json = {"group_id": group_id, "portfolios": portfolios}
4365
4366        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4367
4368        if not res.ok:
4369            error_msg = self._try_extract_error_code(res)
4370            logger.error(error_msg)
4371            raise BoostedAPIException(f"Failed to add portfolios to portfolio group: {error_msg}")
4372
4373        data = res.json()
4374        return data
4375
4376    def remove_from_portfolio_group(
4377        self,
4378        group_id: str,
4379        portfolios: List[str],
4380    ) -> Dict:
4381        """
4382        Remove portfolios from a group
4383
4384        Parameters
4385        ----------
4386
4387        group_id: str,
4388           UUID str identifying a portfolio group
4389
        portfolios: List of str
           UUID strs identifying the portfolios to remove from the group
4391
4392
4393        Returns:
4394        -------
4395        Dict {
4396            removed: int
4397               number of successful changes
4398        }
4399        """
4400        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove-from-group"
4401        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4402        req_json = {"group_id": group_id, "portfolios": portfolios}
4403        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4404
4405        if not res.ok:
4406            error_msg = self._try_extract_error_code(res)
4407            logger.error(error_msg)
4408            raise BoostedAPIException(
4409                f"Failed to remove portfolios from portfolio group: {error_msg}"
4410            )
4411
4412        data = res.json()
4413        return data
4414
4415    def delete_portfolio_group(
4416        self,
4417        group_id: str,
4418    ) -> Dict:
4419        """
4420        Delete a portfolio group
4421
4422        Parameters
4423        ----------
4424
4425        group_id: str,
4426           UUID str identifying a portfolio group
4427
4428
4429        Returns
4430        -------
4431        Dict {
4432            removed_groups: int
4433               number of successful changes
4434
4435            removed_portfolios: int
4436               number of successful changes
4437        }
4438        """
4439        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove"
4440        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4441        req_json = {"group_id": group_id}
4442        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4443
4444        if not res.ok:
4445            error_msg = self._try_extract_error_code(res)
4446            logger.error(error_msg)
4447            raise BoostedAPIException(f"Failed to delete portfolio group: {error_msg}")
4448
4449        data = res.json()
4450        return data
4451
4452    def set_portfolio_group_for_watchlist(
4453        self,
4454        portfolio_group_id: str,
4455        watchlist_id: str,
4456    ) -> Dict:
4457        """
4458        Set portfolio group for watchlist.
4459
4460        Parameters
4461        ----------
4462
4463        portfolio_group_id: str,
4464           UUID str identifying a portfolio group
4465
4466        watchlist_id: str,
4467           UUID str identifying a watchlist
4468
4469
4470        Returns
4471        -------
4472        Dict {
4473            success: bool
4474            errors:
4475            data: Dict
4476                changed: int
4477        }
4478        """
4479        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/set-portfolio-groups/"
4480        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4481        req_json = {"portfolio_group_id": portfolio_group_id, "watchlist_id": watchlist_id}
4482        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4483
4484        if not res.ok:
4485            error_msg = self._try_extract_error_code(res)
4486            logger.error(error_msg)
4487            raise BoostedAPIException(f"Failed to set portfolio group for watchlist: {error_msg}")
4488
4489        return res.json()
4490
4491    def get_ranking_dates(self, model_id: str, portfolio_id: str) -> List[datetime.date]:
4492        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4493        url = self.base_uri + f"/api/analysis/ranking-dates/{model_id}/{portfolio_id}"
4494        res = requests.get(url, headers=headers, **self._request_params)
4495        self._check_ok_or_err_with_msg(res, "Failed to get ranking dates")
4496        data = res.json().get("ranking_dates", [])
4497
4498        return [parser.parse(d).date() for d in data]
4499
4500    def get_prior_ranking_date(
4501        self, ranking_dates: List[datetime.date], starting_date: datetime.date
4502    ) -> datetime.date:
4503        """
4504        Given a starting date and a list of ranking dates, return the most
4505        recent previous ranking date.
4506        """
4507        # order from most recent to least
4508        ranking_dates.sort(reverse=True)
4509
4510        for d in ranking_dates:
4511            if d <= starting_date:
4512                return d
4513
4514        # if we get here, the starting date is before the earliest ranking date
4515        raise BoostedAPIException(f"No rankings exist on or before {starting_date}")
4516
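    # Worked example (illustrative): given ranking_dates
    # [2023-01-02, 2023-02-01, 2023-03-01] and starting_date 2023-02-15,
    # the list is sorted descending and 2023-02-01 is returned, i.e. the most
    # recent ranking date on or before the starting date. A starting date
    # earlier than 2023-01-02 raises BoostedAPIException. Note that the
    # passed-in list is sorted in place.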
4517    def _get_risk_factors_descriptors(
4518        self, model_id: str, portfolio_id: str, use_v2: bool = False
4519    ) -> Dict[int, str]:
4520        """Returns a map from descriptor id to descriptor name."""
4521        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4522
4523        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4524        url = self.base_uri + f"/api/{risk_factor}/{model_id}/{portfolio_id}/descriptors"
4525        res = requests.get(url, headers=headers, **self._request_params)
4526
4527        self._check_ok_or_err_with_msg(res, "Failed to get risk factor descriptors")
4528
4529        descriptors = {int(i): name for i, name in res.json().items() if i.isnumeric()}
4530        return descriptors
4531
4532    def get_risk_groups(
4533        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4534    ) -> List[Dict[str, Any]]:
4535        # first get the group descriptors
4536        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4537
4538        # calculate the most recent prior rankings date. This is the date
4539        # we need to use to query for risk group data.
4540        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4541        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4542        date_str = ranking_date.strftime("%Y-%m-%d")
4543
4544        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4545
4546        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4547        url = self.base_uri + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-groups/{date_str}"
4548        res = requests.get(url, headers=headers, **self._request_params)
4549
4550        self._check_ok_or_err_with_msg(
4551            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4552        )
4553
4554        # Response is a list of rows, where each row is structured like:
4555        # [
4556        #   [
4557        #     [0, 14, 1],
4558        #     [25, 12, 13],
4559        #     0.67013
4560        #   ],
4561        #   ...
4562        # ]
4563        #
4564        # The first two elements of each row are lists of descriptor ids
4565        # (one list per risk group), and the final float is the volatility
4566        # explained by that grouping.
4570
4571        groups = []
4572        for i, row in enumerate(res.json()):
4573            row_map: Dict[str, Any] = {}
4574            # map descriptor id to name
4575            row_map["machine"] = i + 1  # start at 1 not 0
4576            row_map["risk_group_a"] = [descriptors[d] for d in row[0]]
4577            row_map["risk_group_b"] = [descriptors[d] for d in row[1]]
4578            row_map["volatility_explained"] = row[2]
4579            groups.append(row_map)
4580
4581        return groups
4582
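    # Usage sketch (illustrative; IDs are placeholders):
    #
    #   groups = client.get_risk_groups(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #       date=datetime.date(2023, 6, 1),
    #   )
    #   # each entry looks like: {"machine": 1, "risk_group_a": [...names...],
    #   # "risk_group_b": [...names...], "volatility_explained": 0.67013}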
4583    def get_risk_factors_discovered_descriptors(
4584        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4585    ) -> pd.DataFrame:
4586        # first get the group descriptors
4587        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4588
4589        # calculate the most recent prior rankings date. This is the date
4590        # we need to use to query for risk group data.
4591        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4592        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4593        date_str = ranking_date.strftime("%Y-%m-%d")
4594
4595        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4596
4597        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4598        url = (
4599            self.base_uri
4600            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-descriptors/json/{date_str}"
4601        )
4602        res = requests.get(url, headers=headers, **self._request_params)
4603
4604        self._check_ok_or_err_with_msg(
4605            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4606        )
4607
4608        # Endpoint returns a nested list of floats
4609        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4610
4611        # This flat dataframe represents a potentially doubly nested structure
4612        # of Sector -> (high/low volatility) -> security. We don't care about
4613        # the high/low volatility rows, (which will have negative identifiers)
4614        # so we can filter these out.
4615        df = df[df["identifier"] >= 0]
4616
4617        # now, any depth of 2 should be set to 1, since we removed the double
4618        # nesting; restrict the replacement to the "depth" column only.
4619        df["depth"] = df["depth"].replace(to_replace=2, value=1)
4620
4621        # This dataframe represents data that is nested on the UI, so the
4622        # "depth" field indicates which level of nesting each row is at. At this
4623        # point, a depth of 0 indicates a sector, and following depth 1 rows are
4624        # securities within the sector.
4625
4626        # Identifiers in rows with depth 1 will be gbi ids, need to convert to
4627        # symbols.
4628        gbi_ids = df[df["depth"] == 1]["identifier"].tolist()
4629        sec_info = self._get_security_info(gbi_ids)["data"]["securities"]
4630        sec_map = {s["gbiId"]: s["symbol"] for s in sec_info}
4631
4632        def convert_ids(row: pd.Series) -> pd.Series:
4633            # convert each row's "identifier" to the appropriate id type. If the
4634            # depth is 0, the identifier should be a sector, otherwise it should
4635            # be a ticker.
4636            ident = int(row["identifier"])
4637            row["identifier"] = (
4638                descriptors.get(ident).title() if row["depth"] == 0 else sec_map.get(ident)
4639            )
4640            return row
4641
4642        df["depth"] = df["depth"].astype(int)
4643        df["stock_count"] = df["stock_count"].astype(int)
4644        df = df.apply(convert_ids, axis=1)
4645        df = df.reset_index(drop=True)
4646        return df
4647
4648    def get_risk_factors_sectors(
4649        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4650    ) -> pd.DataFrame:
4651        # first get the sector id -> name mapping
4652        sectors = {s["id"]: s["name"] for s in self._get_sector_info()}
4653
4654        # calculate the most recent prior rankings date. This is the date
4655        # we need to use to query for risk group data.
4656        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4657        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4658        date_str = ranking_date.strftime("%Y-%m-%d")
4659
4660        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4661
4662        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4663        url = (
4664            self.base_uri
4665            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-sectors/json/{date_str}"
4666        )
4667        res = requests.get(url, headers=headers, **self._request_params)
4668
4669        self._check_ok_or_err_with_msg(
4670            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4671        )
4672
4673        # Endpoint returns a nested list of floats
4674        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4675
4676        # identifier is a gics sector identifier
4677        df["identifier"] = df["identifier"].apply(lambda i: sectors.get(int(i), None))
4678
4679        # This dataframe represents data that is nested on the UI, so the
4680        # "depth" field indicates which level of nesting each row is at. For
4681        # risk factors sectors, each "depth" represents a level of specificity
4682        # for the sector. E.g. Energy -> Energy Equipment -> Oil & Gas Equipment
4683        df["depth"] = df["depth"].astype(int)
4684        df["stock_count"] = df["stock_count"].astype(int)
4685        df = df.reset_index(drop=True)
4686        return df
4687
4688    def download_complete_portfolio_data(
4689        self, model_id: str, portfolio_id: str, download_filepath: str
4690    ):
4691        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4692        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/excel"
4693
4694        res = requests.get(url, headers=headers, **self._request_params)
4695        self._check_ok_or_err_with_msg(
4696            res, f"Failed to get full data for {model_id=}, {portfolio_id=}"
4697        )
4698
4699        with open(download_filepath, "wb") as f:
4700            f.write(res.content)
4701
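    # Usage sketch (illustrative; the endpoint serves an Excel workbook, so an
    # .xlsx destination path is assumed):
    #
    #   client.download_complete_portfolio_data(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #       download_filepath="portfolio_data.xlsx",
    #   )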
4702    def diff_hedge_experiment_portfolio_data(
4703        self,
4704        hedge_experiment_id: str,
4705        comparison_portfolios: List[str],
4706        categories: List[str],
4707    ) -> Dict:
4708        qry = """
4709        query diffHedgeExperimentPortfolios(
4710            $input: DiffHedgeExperimentPortfoliosInput!
4711        ) {
4712            diffHedgeExperimentPortfolios(input: $input) {
4713            data {
4714                diffs {
4715                    volatility {
4716                        date
4717                        vol5D
4718                        vol10D
4719                        vol21D
4721                        vol63D
4722                        vol126D
4723                        vol189D
4724                        vol252D
4725                        vol315D
4726                        vol378D
4727                        vol441D
4728                        vol504D
4729                    }
4730                    performance {
4731                        date
4732                        value
4733                    }
4734                    performanceGrid {
4735                        headerRow
4736                        values
4737                    }
4738                    factors {
4739                        date
4740                        momentum
4741                        growth
4742                        size
4743                        value
4744                        dividendYield
4745                        volatility
4746                    }
4747                }
4748            }
4749            errors
4750            }
4751        }
4752        """
4753        headers = {"Authorization": "ApiKey " + self.api_key}
4754        params = {
4755            "hedgeExperimentId": hedge_experiment_id,
4756            "portfolioIds": comparison_portfolios,
4757            "categories": categories,
4758        }
4759        resp = requests.post(
4760            f"{self.base_uri}/api/graphql",
4761            json={"query": qry, "variables": params},
4762            headers=headers,
4763            params=self._request_params,
4764        )
4765
4766        json_resp = resp.json()
4767
4768        # graphql endpoints typically return 200 or 400 status codes, so we must
4769        # check if we have any errors, even with a 200
4770        if not resp.ok or "errors" in json_resp:
4771            error_msg = self._try_extract_error_code(resp)
4772            logger.error(error_msg)
4773            raise BoostedAPIException(
4774                (
4775                    f"Failed to get portfolio diffs for {hedge_experiment_id=}: "
4776                    f"{resp.status_code=}; {error_msg=}"
4777                )
4778            )
4779
4780        diffs = json_resp["data"]["diffHedgeExperimentPortfolios"]["data"]["diffs"]
4781        comparisons = {}
4782        for pf, cmp in zip(comparison_portfolios, diffs):
4783            res: Dict[str, Any] = {
4784                "performance": None,
4785                "performanceGrid": None,
4786                "factors": None,
4787                "volatility": None,
4788            }
4789            if "performanceGrid" in cmp:
4790                grid = cmp["performanceGrid"]
4791                grid_df = pd.DataFrame(grid["values"], columns=grid["headerRow"])
4792                res["performanceGrid"] = grid_df
4793            if "performance" in cmp:
4794                perf_df = pd.DataFrame(cmp["performance"]).set_index("date")
4795                perf_df.index = pd.to_datetime(perf_df.index)
4796                res["performance"] = perf_df
4797            if "volatility" in cmp:
4798                vol_df = pd.DataFrame(cmp["volatility"]).set_index("date")
4799                vol_df.index = pd.to_datetime(vol_df.index)
4800                res["volatility"] = vol_df
4801            if "factors" in cmp:
4802                factors_df = pd.DataFrame(cmp["factors"]).set_index("date")
4803                factors_df.index = pd.to_datetime(factors_df.index)
4804                res["factors"] = factors_df
4805            comparisons[pf] = res
4806        return comparisons
4807
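    # Usage sketch (illustrative; IDs are placeholders and the category names
    # are assumptions, since the valid values are not listed in this file):
    #
    #   diffs = client.diff_hedge_experiment_portfolio_data(
    #       hedge_experiment_id="<experiment-uuid>",
    #       comparison_portfolios=["<portfolio-uuid>"],
    #       categories=["performance", "volatility"],
    #   )
    #   perf_df = diffs["<portfolio-uuid>"]["performance"]  # DataFrame or None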
4808    def get_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4809        url = self.base_uri + f"/api/analysis/signal_strength/{model_id}/{portfolio_id}"
4810        headers = {"Authorization": "ApiKey " + self.api_key}
4811
4812        logger.info(f"Retrieving portfolio signals for {model_id=}, {portfolio_id=}")
4813
4814        # Response format is a json object with a "header_row" key for column
4815        # names, and then a nested list of data.
4816        resp = requests.get(url, headers=headers, **self._request_params)
4817        self._check_ok_or_err_with_msg(
4818            resp, f"Failed to get portfolio signals for {model_id=}, {portfolio_id=}"
4819        )
4820
4821        data = resp.json()
4822
4823        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4824        df["Date"] = pd.to_datetime(df["Date"])
4825        df = df.set_index("Date")
4826        return df.astype(float)
4827
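    # Usage sketch (illustrative): the result is a float DataFrame indexed by
    # date, one column per signal, so ordinary pandas operations apply:
    #
    #   signals = client.get_signal_strength("<model-uuid>", "<portfolio-uuid>")
    #   latest = signals.iloc[-1]              # most recent signal strengths
    #   smoothed = signals.rolling(21).mean()  # ~1 trading month average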
4828    def get_rolling_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4829        url = self.base_uri + f"/api/analysis/signal_strength_rolling/{model_id}/{portfolio_id}"
4830        headers = {"Authorization": "ApiKey " + self.api_key}
4831
4832        logger.info(f"Retrieving rolling portfolio signals for {model_id=}, {portfolio_id=}")
4833
4834        # Response format is a json object with a "header_row" key for column
4835        # names, and then a nested list of data.
4836        resp = requests.get(url, headers=headers, **self._request_params)
4837        self._check_ok_or_err_with_msg(
4838            resp, f"Failed to get rolling portfolio signals for {model_id=}, {portfolio_id=}"
4839        )
4840
4841        data = resp.json()
4842
4843        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4844        df["Date"] = pd.to_datetime(df["Date"])
4845        df = df.set_index("Date")
4846        return df.astype(float)
4847
4848    def get_portfolio_quantiles(
4849        self,
4850        model_id: str,
4851        portfolio_id: str,
4852        id_type: Literal["TICKER", "ISIN"] = "TICKER",
4853    ):
4854        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4855        date = datetime.date.today().strftime("%Y-%m-%d")
4856
4857        payload = {
4858            "model_id": model_id,
4859            "portfolio_id": portfolio_id,
4860            "fields": ["quantile"],
4861            "min_date": date,
4862            "max_date": date,
4863            "return_format": "json",
4864        }
4865        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
4866        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
4867
4868        res: requests.Response = requests.post(
4869            url, json=payload, headers=headers, **self._request_params
4870        )
4871        self._check_ok_or_err_with_msg(res, "Unable to get quantile data")
4872
4873        resp: Dict = res.json()
4874        quantile_index = resp["field_map"]["Quantile"]
4875        quantile_data = [[c[quantile_index] for c in r] for r in resp["data"]]
4876        date_cols = pd.to_datetime(resp["columns"])
4877
4878        # Need to map gbi ids to isins or tickers
4879        gbi_ids = [int(i) for i in resp["rows"]]
4880        security_info = self._get_security_info(gbi_ids)
4881
4882        # We now have security data, go through and create a map from internal
4883        # gbi id to client facing identifier
4884        id_key = "isin" if id_type == "ISIN" else "symbol"
4885        gbi_identifier_map = {
4886            sec["gbiId"]: sec[id_key] for sec in security_info["data"]["securities"]
4887        }
4888
4889        df = pd.DataFrame(quantile_data, index=gbi_ids, columns=date_cols).transpose()
4890        df = df.rename(columns=gbi_identifier_map)
4891        return df
4892
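    # Usage sketch (illustrative): because min_date and max_date are both set
    # to today, the result has a single row (today) and one column per
    # ticker or ISIN:
    #
    #   df = client.get_portfolio_quantiles(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #       id_type="TICKER",
    #   )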
4893    def get_similar_stocks(
4894        self,
4895        model_id: str,
4896        portfolio_id: str,
4897        symbol_list: List[str],
4898        date: BoostedDate,
4899        identifier_type: Literal["TICKER", "ISIN"],
4900        preferred_country: Optional[str] = None,
4901        preferred_currency: Optional[str] = None,
4902    ) -> pd.DataFrame:
4903        date_str = convert_date(date).strftime("%Y-%m-%d")
4904
4905        sec_data = self.getGbiIdFromIdentCountryCurrencyDate(
4906            ident_country_currency_dates=[
4907                DateIdentCountryCurrency(
4908                    date=datetime.date.today().isoformat(),
4909                    identifier=s,
4910                    id_type=(
4911                        ColumnSubRole.SYMBOL if identifier_type == "TICKER" else ColumnSubRole.ISIN
4912                    ),
4913                    country=preferred_country,
4914                    currency=preferred_currency,
4915                )
4916                for s in symbol_list
4917            ]
4918        )
4919
4920        gbi_id_ident_map: Dict[int, str] = {}
4921        for sec in sec_data:
4922            ident = sec.ticker if identifier_type == "TICKER" else sec.isin_info.identifier
4923            gbi_id_ident_map[sec.gbi_id] = ident
4924        gbi_ids = list(gbi_id_ident_map.keys())
4925
4926        qry = """
4927          query GetSimilarStocks(
4928            $modelId: ID!
4929            $portfolioId: ID!
4930            $gbiIds: [Int]!
4931            $startDate: String!
4932            $endDate: String!
4933            $includeCorrelation: Boolean
4934          ) {
4935            similarStocks(
4936              modelId: $modelId,
4937              portfolioId: $portfolioId,
4938              gbiIds: $gbiIds,
4939              startDate: $startDate,
4940              endDate: $endDate,
4941              includeCorrelation: $includeCorrelation
4942            ) {
4943              gbiId
4944              overallSimilarityScore
4945              priceSimilarityScore
4946              factorSimilarityScore
4947              correlation
4948            }
4949          }
4950        """
4951        variables = {
4952            "startDate": date_str,
4953            "endDate": date_str,
4954            "modelId": model_id,
4955            "portfolioId": portfolio_id,
4956            "gbiIds": gbi_ids,
4957            "includeCorrelation": True,
4958        }
4959
4960        resp = self._get_graphql(
4961            qry, variables=variables, error_msg_prefix="Failed to get similar stocks result: "
4962        )
4963        df = pd.DataFrame(resp["data"]["similarStocks"])
4964
4965        # Now that we have the rest of the securities in the portfolio, we need
4966        # to map them back to the correct identifiers
4967        all_gbi_ids = df["gbiId"].tolist()
4968        sec_info = self._get_security_info(all_gbi_ids)
4969        for s in sec_info["data"]["securities"]:
4970            ident = s["symbol"] if identifier_type == "TICKER" else s["isin"]
4971            gbi_id_ident_map[s["gbiId"]] = ident
4972        df["identifier"] = df["gbiId"].map(gbi_id_ident_map)
4973        df = df.set_index("identifier")
4974        return df.drop("gbiId", axis=1)
4975
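    # Usage sketch (illustrative; tickers are placeholders):
    #
    #   df = client.get_similar_stocks(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #       symbol_list=["AAPL", "MSFT"],
    #       date=datetime.date(2023, 6, 1),
    #       identifier_type="TICKER",
    #   )
    #   # df is indexed by identifier, with similarity and correlation columns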
4976    def get_portfolio_trades(
4977        self,
4978        model_id: str,
4979        portfolio_id: str,
4980        start_date: Optional[BoostedDate] = None,
4981        end_date: Optional[BoostedDate] = None,
4982    ) -> pd.DataFrame:
4983        if not end_date:
4984            end_date = datetime.date.today()
4985        end_date = convert_date(end_date)
4986
4987        if not start_date:
4988            # default to a year of data
4989            start_date = end_date - datetime.timedelta(days=365)
4990        start_date = convert_date(start_date)
4991
4992        start_date_str = start_date.strftime("%Y-%m-%d")
4993        end_date_str = end_date.strftime("%Y-%m-%d")
4994
4995        if end_date - start_date > datetime.timedelta(days=365 * 7):
4996            raise BoostedAPIException(
4997                f"Date range ({start_date_str}, {end_date_str}) too large, max 7 years"
4998            )
4999
5000        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
5001        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5002        payload = {
5003            "model_id": model_id,
5004            "portfolio_id": portfolio_id,
5005            "fields": ["price", "shares_traded", "shares_owned"],
5006            "min_date": start_date_str,
5007            "max_date": end_date_str,
5008            "return_format": "json",
5009        }
5010
5011        res: requests.Response = requests.post(
5012            url, json=payload, headers=headers, **self._request_params
5013        )
5014        self._check_ok_or_err_with_msg(res, "Unable to get portfolio trades data")
5015
5016        data = res.json()
5017        gbi_ids = [int(ident) for ident in data["rows"]]
5018
5019        # need both isin and ticker to distinguish between possible duplicates;
5020        # fetch the security info once rather than issuing the request twice
5021        sec_info = self._get_security_info(gbi_ids)["data"]["securities"]
5022        isin_map = {str(s["gbiId"]): s["isin"] for s in sec_info}
5023        ticker_map = {str(s["gbiId"]): s["symbol"] for s in sec_info}
5028
5029        # construct individual dataframes for each security, then join them together
5030        dfs: List[pd.DataFrame] = []
5031        full_data = data["data"]
5032        for i, gbi_id in enumerate(data["rows"]):
5033            df = pd.DataFrame(
5034                index=pd.to_datetime(data["columns"]), columns=data["fields"], data=full_data[i]
5035            )
5036            # drop rows where no shares are owned or traded
5037            df.drop(
5038                df.loc[((df["shares_owned"] == 0.0) & (df["shares_traded"] == 0.0))].index,
5039                inplace=True,
5040            )
5041            df["isin"] = isin_map[gbi_id]
5042            df["ticker"] = ticker_map[gbi_id]
5043            dfs.append(df)
5044
5045        full_df = pd.concat(dfs)
5046        full_df["date"] = full_df.index
5047        full_df.sort_index(inplace=True)
5048        full_df.reset_index(drop=True, inplace=True)
5049
5050        # reorder the columns to match the spreadsheet
5051        columns = ["isin", "ticker", "date", *data["fields"]]
5052        return full_df[columns]
5053
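    # Usage sketch (illustrative): omitting the dates fetches the trailing
    # year of trades; ranges longer than 7 years raise BoostedAPIException:
    #
    #   trades = client.get_portfolio_trades(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #   )
    #   # columns: isin, ticker, date, price, shares_traded, shares_owned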
5054    def get_ideas(
5055        self,
5056        model_id: str,
5057        portfolio_id: str,
5058        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5059        delta_horizon: str = "1M",
5060    ):
5061        if investment_horizon not in ("1M", "3M", "1Y"):
5062            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5063
5064        if delta_horizon not in ("1W", "1M", "3M", "6M", "9M", "1Y"):
5065            raise BoostedAPIException(f"Invalid delta horizon: {delta_horizon}")
5066
5067        # First compute dates based on the delta horizon. "0D" is the latest rebalance.
5068        try:
5069            dates = self._get_portfolio_rebalance_from_periods(
5070                portfolio_id=portfolio_id, rel_periods=["0D", delta_horizon]
5071            )
5072        except Exception:
5073            raise BoostedAPIException(
5074                f"Portfolio {portfolio_id} does not exist or you do not have permission to view it."
5075            )
5076        end_date = dates[0].strftime("%Y-%m-%d")
5077        start_date = dates[1].strftime("%Y-%m-%d")
5078
5079        resp = self._get_graphql(
5080            graphql_queries.GET_IDEAS_QUERY,
5081            variables={
5082                "modelId": model_id,
5083                "portfolioId": portfolio_id,
5084                "horizon": investment_horizon,
5085                "deltaHorizon": delta_horizon,
5086                "startDate": start_date,
5087                "endDate": end_date,
5088                # Note: market data date is needed to fetch market cap.
5089                # we don't fetch that data from this endpoint so we stub
5090                # out the mandatory parameter with the end date requested
5091                "marketDataDate": end_date,
5092            },
5093            error_msg_prefix="Failed to get ideas: ",
5094        )
5095        # rows is a list of dicts like:
5096        # {
5097        #   "category": "Strong Sell",
5098        #   "dividendYield": 0.0,
5099        #   "reason": "Boosted Insights has given this stock...",
5100        #   "rating": 0.458167,
5101        #   "ratingDelta": 0.438087,
5102        #   "risk": {
5103        #     "text": "high"
5104        #   },
5105        #   "security": {
5106        #     "symbol": "BA"
5107        #   }
5108        # }
5109        try:
5110            rows = resp["data"]["recommendations"]["recommendations"]
5111            data = [
5112                {
5113                    "symbol": r["security"]["symbol"],
5114                    "recommendation": r["category"],
5115                    "rating": r["rating"],
5116                    "rating_delta": r["ratingDelta"],
5117                    "dividend_yield": r["dividendYield"],
5118                    "predicted_excess_return_1m": r["ER"]["oneMonth"],
5119                    "predicted_excess_return_3m": r["ER"]["threeMonth"],
5120                    "predicted_excess_return_1y": r["ER"]["oneYear"],
5121                    "risk": r["risk"]["text"],
5122                    "reward": r["reward"]["text"],
5123                    "reason": r["reason"],
5124                }
5125                for r in rows
5126            ]
5127            df = pd.DataFrame(data)
5128            df.set_index("symbol", inplace=True)
5129        except Exception:
5130            # Don't show old exception info to client
5131            raise BoostedAPIException(
5132                "No recommendations found, try selecting another horizon."
5133            ) from None
5134
5135        return df
5136
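    # Usage sketch (illustrative): invalid horizons raise BoostedAPIException
    # before any network call; the result is indexed by symbol:
    #
    #   ideas = client.get_ideas(
    #       model_id="<model-uuid>",
    #       portfolio_id="<portfolio-uuid>",
    #       investment_horizon="3M",
    #       delta_horizon="1M",
    #   )
    #   strong_sells = ideas[ideas["recommendation"] == "Strong Sell"]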
5137    def get_stock_recommendations(
5138        self,
5139        model_id: str,
5140        portfolio_id: str,
5141        symbols: Optional[List[str]] = None,
5142        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5143    ) -> pd.DataFrame:
5144        model_stocks = self._get_model_stocks(model_id)
5145
5146        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5147        gbi_ids_to_symbols = {s.gbi_id: s.ticker for s in model_stocks}
5148
5149        variables: Dict[str, Any] = {
5150            "strategyId": portfolio_id,
5151        }
5152        if symbols:
5153            variables["gbiIds"] = [
5154                symbols_to_gbiids.get(symbol) for symbol in symbols if symbols_to_gbiids.get(symbol)
5155            ]
5156        try:
5157            recs = self._get_graphql(
5158                graphql_queries.MULTI_STOCK_RECOMMENDATION_QUERY,
5159                variables=variables,
5160                log_error=False,
5161            )["data"]["currentRecommendationsFull"]
5162        except BoostedAPIException:
5163            raise BoostedAPIException(f"Error getting recommendations for strategy {portfolio_id}")
5164
5165        data = []
5166        recommendation_key = f"recommendation{investment_horizon}"
5167        for rec in recs:
5168            # Keys to rec are:
5169            # ['ER', 'rewardCategories', 'riskCategories', 'reasons',
5170            #  'recommendation', 'rewardCategory', 'riskCategory']
5171            # need to flatten these out and add to a DF
5172            rec_data = rec[recommendation_key]
5173            reasons_dict = {r["type"]: r["text"] for r in rec_data["reasons"]}
5174            row = {
5175                "symbol": gbi_ids_to_symbols[rec["gbiId"]],
5176                "recommendation": rec_data["currentCategory"],
5177                "predicted_excess_return_1m": rec_data["ER"]["oneMonth"],
5178                "predicted_excess_return_3m": rec_data["ER"]["threeMonth"],
5179                "predicted_excess_return_1y": rec_data["ER"]["oneYear"],
5180                "risk": rec_data["risk"]["text"],
5181                "reward": rec_data["reward"]["text"],
5182                "reasons": reasons_dict,
5183            }
5184
5185            data.append(row)
5186        df = pd.DataFrame(data)
5187        df.set_index("symbol", inplace=True)
5188        return df
5189
5190    # NOTE: this could easily be expanded to the entire stockRecommendation
5191    # entity, but that only includes all horizons' excess returns and
5192    # risk/reward, which we already get from get_ideas
5193    def get_stock_recommendation_reasons(
5194        self,
5195        model_id: str,
5196        portfolio_id: str,
5197        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5198        symbols: Optional[List[str]] = None,
5199    ) -> Dict[str, Optional[List[str]]]:
5200        if investment_horizon not in ("1M", "3M", "1Y"):
5201            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5202
5203        # "0D" is the latest rebalance - it's all we have in terms of recs
5204        dates = self._get_portfolio_rebalance_from_periods(
5205            portfolio_id=portfolio_id, rel_periods=["0D"]
5206        )
5207        date = dates[0].strftime("%Y-%m-%d")
5208
5209        model_stocks = self._get_model_stocks(model_id)
5210
5211        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5212        if symbols is None:  # potentially iterate through all holdings
5213            symbols = list(symbols_to_gbiids.keys())
5214
5215        reasons: Dict[str, Optional[List[str]]] = {}
5216        for sym in symbols:
5217            # it's possible that a passed symbol was not actually a portfolio holding
5218            try:
5219                gbi_id = symbols_to_gbiids[sym]
5220            except KeyError:
5221                logger.warning(f"Symbol={sym} not found in universe on {date=}")
5222                reasons[sym] = None
5223                continue
5224
5225            try:
5226                recs = self._get_graphql(
5227                    graphql_queries.STOCK_RECOMMENDATION_QUERY,
5228                    variables={
5229                        "modelId": model_id,
5230                        "portfolioId": portfolio_id,
5231                        "horizon": investment_horizon,
5232                        "gbiId": gbi_id,
5233                        "date": date,
5234                    },
5235                    log_error=False,
5236                )
5237                reasons[sym] = [
5238                    reason["text"] for reason in recs["data"]["stockRecommendation"]["reasons"]
5239                ]
5240            except BoostedAPIException:
5241                logger.warning(f"No recommendation for: {sym}, skipping...")
5242        return reasons
5243
5244    def get_stock_mapping_alternatives(
5245        self,
5246        isin: Optional[str] = None,
5247        symbol: Optional[str] = None,
5248        country: Optional[str] = None,
5249        currency: Optional[str] = None,
5250        asof_date: Optional[BoostedDate] = None,
5251    ) -> Dict:
5252        """
5253        Return the stock mapping for the given criteria, along with
5254        suggestions for alternative matches in case the returned mapping
5255        is not the one desired.
5256
5257
5258            Parameters [either isin or symbol must be provided]
5259            ----------
5260            isin: Optional[str]
5261                search by ISIN
5262            symbol: Optional[str]
5263                search by Ticker Symbol
5264            country: Optional[str]
5265                Additionally filter by country code - ex: None, "ANY", "p_USA", "CAN"
5266            currency: Optional[str]
5267                Additionally filter by currency code - ex: None, "ANY", "p_USD", "CAD"
5268            asof_date: Optional[date]
5269                as of which date to perform the search, default is today()
5270
5271            Note: a country/currency filter starting with "p_" indicates
5272                  a soft preference; other matches are still allowed
5273
5274        Returns
5275        -------
5276        Dictionary Representing this 'MapSecurityResponse' structure:
5277
5278        class MapSecurityResponse():
5279            stock_mapping: Optional[SecurityInfo]
5280               The mapping we would perform given your inputs
5281
5282            alternatives: Optional[List[SecurityInfo]]
5283               Alternative suggestions based on your input
5284
5285            error: Optional[str]
5286
5287        class SecurityInfo():
5288            gbi_id: int
5289            isin: str
5290            symbol: Optional[str]
5291            country: str
5292            currency: str
5293            name: str
5294            from_date: date
5295            to_date: date
5296            is_primary_trading_item: bool
5297
5298        """
5299
5300        url = f"{self.base_uri}/api/stock-mapping/alternatives"
5301        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5302        req_json: Dict = {
5303            "isin": isin,
5304            "symbol": symbol,
5305            "countryPreference": country,
5306            "currencyPreference": currency,
5307        }
5308
5309        if asof_date:
5310            req_json["date"] = convert_date(asof_date).isoformat()
5311
5312        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
5313
5314        if not res.ok:
5315            error_msg = self._try_extract_error_code(res)
5316            logger.error(error_msg)
5317            raise BoostedAPIException(f"Failed to get stock mapping alternatives: {error_msg}")
5318
5319        data = res.json()
5320        return data
5321
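    # Usage sketch (illustrative; the ISIN is a placeholder):
    #
    #   resp = client.get_stock_mapping_alternatives(
    #       isin="US0000000000",
    #       country="p_USA",   # soft preference for US listings
    #       currency="p_USD",  # soft preference for USD
    #   )
    #   best = resp["stock_mapping"]   # the mapping that would be performed
    #   others = resp["alternatives"]  # alternative suggestions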
5322    def get_pros_cons_for_stocks(
5323        self,
5324        model_id: Optional[str] = None,
5325        symbols: Optional[List[str]] = None,
5326        preferred_country: Optional[str] = None,
5327        preferred_currency: Optional[str] = None,
5328    ) -> Dict[str, Dict[str, List]]:
5329        if symbols:
5330            ident_objs = [
5331                DateIdentCountryCurrency(
5332                    date=datetime.date.today().strftime("%Y-%m-%d"),
5333                    identifier=symbol,
5334                    country=preferred_country,
5335                    currency=preferred_currency,
5336                    id_type=ColumnSubRole.SYMBOL,
5337                )
5338                for symbol in symbols
5339            ]
5340            sec_objs = self.getGbiIdFromIdentCountryCurrencyDate(
5341                ident_country_currency_dates=ident_objs
5342            )
5343            gbi_id_ticker_map = {sec.gbi_id: sec.ticker for sec in sec_objs if sec}
5344        elif model_id:
5345            model_stocks = self._get_model_stocks(model_id=model_id)
5346            gbi_id_ticker_map = {sec.gbi_id: sec.ticker for sec in model_stocks}
5347        else:
5348            raise BoostedAPIException("Must provide either model_id or symbols")
5349        gbi_ids = list(gbi_id_ticker_map.keys())
5350        data = self._get_graphql(
5351            query=graphql_queries.GET_PROS_CONS_QUERY,
5352            variables={"gbiIds": gbi_ids},
5353            error_msg_prefix="Failed to get pros/cons:",
5354        )
5355        gbi_id_pros_cons_map = {
5356            row["gbiId"]: {"pros": row["pros"], "cons": row["cons"]}
5357            for row in data["data"]["bulkSecurityProsCons"]
5358        }
5359
5360        return {
5361            gbi_id_ticker_map[gbi_id]: pros_cons
5362            for gbi_id, pros_cons in gbi_id_pros_cons_map.items()
5363        }
5364
5365    def generate_theme(self, theme_name: str, stock_universes: List[ThemeUniverse]) -> str:
5366        # First get universe name and id mappings
5367        try:
5368            resp = self._get_graphql(
5369                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5370            )
5371            data = resp["data"]["getMarketTrendsUniverses"]
5372        except Exception:
5373            raise BoostedAPIException("Failed to load market trends universes mapping")
5374
5375        universe_name_to_id = {u["name"]: u["id"] for u in data}
5376        universe_ids = [universe_name_to_id[u.value] for u in stock_universes]
5377        try:
5378            resp = self._get_graphql(
5379                query=graphql_queries.GENERATE_THEME_QUERY,
5380                variables={"input": {"themeName": theme_name, "universeIds": universe_ids}},
5381            )
5382            data = resp["data"]["generateTheme"]
5383        except Exception:
5384            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5385
5386        if not data["success"]:
5387            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5388
5389        logger.info(
5390            f"Successfully generated theme: {theme_name}. The theme ID is {data['themeId']}"
5391        )
5392        return data["themeId"]
5393
5394    def _get_stock_universe_id(self, universe: ThemeUniverse) -> str:
5395        try:
5396            resp = self._get_graphql(
5397                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5398            )
5399            data = resp["data"]["getMarketTrendsUniverses"]
5400        except Exception:
5401            raise BoostedAPIException("Failed to load market trends universes mapping")
5402
5403        for u in data:
5404            if u["name"] == universe.value:
5405                universe_id = u["id"]
5406                return universe_id
5407
5408        raise BoostedAPIException(f"Failed to find universe: {universe.value}")
5409
5410    def get_themes_for_stock_universe(
5411        self,
5412        stock_universe: ThemeUniverse,
5413        start_date: Optional[BoostedDate] = None,
5414        end_date: Optional[BoostedDate] = None,
5415        language: Optional[Union[str, Language]] = None,
5416    ) -> List[Dict]:
5417        """Get all themes data for a particular stock universe.
5418        (start_date, end_date) are used to calculate theme importance for
5419        ranking purposes; if None, they default to the past 30 days.
5420        Returns: A list of dictionaries of the form
5421        {
5422            themeId: str
5423            themeName: str
5424            themeImportance: float
5425            volatility: float
5426            positiveStockPerformance: float
5427            negativeStockPerformance: float
5428        }
5429        """
5430        translate = functools.partial(self.translate_text, language)
5431        # First get universe name and id mappings
5432        universe_id = self._get_stock_universe_id(stock_universe)
5433
5434        start_date_iso, end_date_iso = get_valid_iso_dates(start_date, end_date)
5435
5436        try:
5437            resp = self._get_graphql(
5438                query=graphql_queries.GET_THEMES,
5439                variables={
5440                    "type": "UNIVERSE",
5441                    "id": universe_id,
5442                    "startDate": start_date_iso,
5443                    "endDate": end_date_iso,
5444                    "deltaHorizon": "",  # not needed here
5445                },
5446            )
5447            data = resp["data"]["themes"]
5448        except Exception:
5449            raise BoostedAPIException(
5450                f"Failed to get themes for stock universe: {stock_universe.name}"
5451            )
5452
5453        for theme_data in data:
5454            theme_data["themeName"] = translate(theme_data["themeName"])
5455        return data
5456
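    # Usage sketch (illustrative; the ThemeUniverse member shown is a
    # hypothetical name -- use whichever members boosted.api.api_type defines):
    #
    #   themes = client.get_themes_for_stock_universe(
    #       stock_universe=ThemeUniverse.SP500,  # hypothetical member
    #   )
    #   ranked = sorted(themes, key=lambda t: t["themeImportance"], reverse=True)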
5457    def get_themes_for_stock(
5458        self,
5459        isin: str,
5460        currency: Optional[str] = None,
5461        country: Optional[str] = None,
5462        start_date: Optional[BoostedDate] = None,
5463        end_date: Optional[BoostedDate] = None,
5464        language: Optional[Union[str, Language]] = None,
5465    ) -> List[Dict]:
5466        """Get all themes data for a particular stock.
5467        (ISIN, currency, country) together uniquely identify a stock, which is mapped to a GBI ID.
5468        (start_date, end_date) are used to calculate theme importance for
5469        ranking purposes; if None, they default to the past 30 days.
5470
5471        Returns
5472        A list of dictionaries of the form
5473        {
5474            themeId: str
5475            themeName: str
5476            importanceScore: float
5477            similarityScore: float
5478            positiveThemeRelation: bool
5479            reason: String
5480        }
5481        """
5482        translate = functools.partial(self.translate_text, language)
5483        security_info = self.get_stock_mapping_alternatives(
5484            isin, country=country, currency=currency
5485        )
5486        gbi_id = security_info["stock_mapping"]["gbi_id"]
5487
5488        if (start_date and not end_date) or (end_date and not start_date):
5489            raise BoostedAPIException("Must provide both start and end dates or neither")
5490        elif not end_date and not start_date:
5491            end_date = datetime.date.today()
5492            start_date = end_date - datetime.timedelta(days=30)
5493            end_date = end_date.isoformat()
5494            start_date = start_date.isoformat()
5495        else:
5496            if isinstance(start_date, datetime.date):
5497                start_date = start_date.isoformat()
5498            if isinstance(end_date, datetime.date):
5499                end_date = end_date.isoformat()
5500
5501        try:
5502            resp = self._get_graphql(
5503                query=graphql_queries.GET_THEMES_FOR_STOCK_WITH_REASONS,
5504                variables={"gbiId": gbi_id, "startDate": start_date, "endDate": end_date},
5505            )
5506            data = resp["data"]["themesForStockWithReasons"]
5507        except Exception:
5508            raise BoostedAPIException(f"Failed to get themes for stock: {isin}")
5509
5510        for item in data:
5511            item["themeName"] = translate(item["themeName"])
5512            item["reason"] = translate(item["reason"])
5513        return data
5514
5515    def get_stock_news(
5516        self,
5517        time_horizon: NewsHorizon,
5518        isin: str,
5519        currency: Optional[str] = None,
5520        country: Optional[str] = None,
5521        language: Optional[Union[str, Language]] = None,
5522    ) -> Dict:
5523        """
5524        Get a stock's news summary for a given time horizon, the topics
5525        extracted from that news, and the news items behind each topic.
5526        Returns
5527        -------
5528        A nested dictionary in the following format:
5529        {
5530            summary: str
5531            topics: [
5532                {
5533                    topicId: str
5534                    topicLabel: str
5535                    topicDescription: str
5536                    topicPolarity: str
5537                    newsItems: [
5538                        {
5539                            newsId: str
5540                            headline: str
5541                            url: str
5542                            summary: str
5543                            source: str
5544                            publishedAt: str
5545                        }
5546                    ]
5547                }
5548            ]
5549            other_news_count: int
5550        }
5551        """
5552        translate = functools.partial(self.translate_text, language)
5553        security_info = self.get_stock_mapping_alternatives(
5554            isin, country=country, currency=currency
5555        )
5556        gbi_id = security_info["stock_mapping"]["gbi_id"]
5557
5558        try:
5559            resp = self._get_graphql(
5560                query=graphql_queries.GET_STOCK_NEWS_QUERY,
5561                variables={"gbiId": gbi_id, "deltaHorizon": time_horizon.value},
5562            )
5563            data = resp["data"]
5564        except Exception:
5565            raise BoostedAPIException(f"Failed to get news for stock: {isin}")
5566
5567        outputs: Dict[str, Any] = {}
5568        outputs["summary"] = translate(data["getStockNewsSummary"]["summary"])
5569        # Return the top 10 topics
5570        outputs["topics"] = data["getStockNewsTopics"]["topics"][:10]
5571
5572        for topic in outputs["topics"]:
5573            topic["topicLabel"] = translate(topic["topicLabel"])
5574            topic["topicDescription"] = translate(topic["topicDescription"])
5575
5576        other_news_count = 0
5577        for source_count in data["getStockNewsSummary"]["sourceCounts"]:
5578            other_news_count += source_count["count"]
5579
5580        for topic in outputs["topics"]:
5581            other_news_count -= len(topic["newsItems"])
5582
5583        outputs["other_news_count"] = other_news_count
5584
5585        return outputs
5586
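    # Usage sketch (illustrative; the NewsHorizon member and ISIN are
    # hypothetical placeholders):
    #
    #   news = client.get_stock_news(
    #       time_horizon=NewsHorizon.ONE_WEEK,  # hypothetical member name
    #       isin="US0000000000",
    #   )
    #   print(news["summary"])
    #   for topic in news["topics"]:
    #       print(topic["topicLabel"], len(topic["newsItems"]))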
5587    def get_theme_details(
5588        self,
5589        theme_id: str,
5590        universe: ThemeUniverse,
5591        language: Optional[Union[str, Language]] = None,
5592    ) -> Dict[str, Any]:
5593        translate = functools.partial(self.translate_text, language)
5594        universe_id = self._get_stock_universe_id(universe)
5595        date = datetime.date.today()
5596        prev_date = date - datetime.timedelta(days=30)
5597        result = self._get_graphql(
5598            query=graphql_queries.GET_THEME_DEEPDIVE_DETAILS,
5599            variables={
5600                "deltaHorizon": "1W",
5601                "startDate": prev_date.strftime("%Y-%m-%d"),
5602                "endDate": date.strftime("%Y-%m-%d"),
5603                "id": universe_id,
5604                "themeId": theme_id,
5605                "type": "UNIVERSE",
5606            },
5607            error_msg_prefix="Failed to get theme details",
5608        )["data"]["marketThemes"]
5609
5610        gbi_id_stock_data_map: Dict[int, Dict] = {}
5611
5612        stocks = []
5613        for stock_info in result["stockInfos"]:
5614            gbi_id_stock_data_map[stock_info["gbiId"]] = stock_info["security"]
5615            stocks.append(
5616                {
5617                    "isin": stock_info["security"]["isin"],
5618                    "name": stock_info["security"]["name"],
5619                    "reason": translate(stock_info["polarityReasonScores"]["reason"]),
5620                    "positive_theme_relation": stock_info["polarityReasonScores"][
5621                        "positiveThemeRelation"
5622                    ],
5623                    "theme_stock_impact_score": stock_info["polarityReasonScores"][
5624                        "similarityScore"
5625                    ],
5626                }
5627            )
5628
5629        impacts = []
5630        for impact in result["impactInfos"]:
5631            articles = [
5632                {
5633                    "title": newsitem["headline"],
5634                    "url": newsitem["url"],
5635                    "source": newsitem["source"],
5636                    "publish_date": newsitem["publishedAt"],
5637                }
5638                for newsitem in impact["newsItems"]
5639            ]
5640
5641            impact_stocks = []
5642            for impact_stock_data in impact["stocks"]:
5643                stock_metadata = gbi_id_stock_data_map[impact_stock_data["gbiId"]]
5644                impact_stocks.append(
5645                    {
5646                        "isin": stock_metadata["isin"],
5647                        "name": stock_metadata["name"],
5648                        "positive_impact_relation": impact_stock_data["positiveThemeRelation"],
5649                    }
5650                )
5651
5652            impact_dict = {
5653                "impact_name": translate(impact["impactName"]),
5654                "impact_description": translate(impact["impactDescription"]),
5655                "impact_score": impact["impactScore"],
5656                "articles": articles,
5657                "impact_stocks": impact_stocks,
5658            }
5659            impacts.append(impact_dict)
5660
5661        developments = []
5662        for dev in result["themeDevelopments"]:
5663            developments.append(
5664                {
5665                    "name": dev["label"],
5666                    "article_count": dev["articleCount"],
5667                    "date": parser.parse(dev["date"]).date(),
5668                    "description": dev["description"],
5669                    "is_major_development": dev["isMajorDevelopment"],
5670                    "sentiment": dev["sentiment"],
5671                    "news": [
5672                        {
5673                            "headline": entry["headline"],
5674                            "published_at": parser.parse(entry["publishedAt"]),
5675                            "source": entry["source"],
5676                            "url": entry["url"],
5677                        }
5678                        for entry in dev["news"]
5679                    ],
5680                }
5681            )
5682
5683        developments = sorted(developments, key=lambda d: d["date"], reverse=True)
5684
5685        output = {
5686            "theme_name": translate(result["themeName"]),
5687            "theme_summary": translate(result["themeDescription"]),
5688            "impacts": impacts,
5689            "stocks": stocks,
5690            "developments": developments,
5691        }
5692        return output
5693
5694    def get_all_theme_metadata(
5695        self, language: Optional[Union[str, Language]] = None
5696    ) -> List[Dict[str, Any]]:
5697        translate = functools.partial(self.translate_text, language)
5698        result = self._get_graphql(
5699            graphql_queries.GET_ALL_THEMES,
5700            variables={"universeIds": None},
5701            error_msg_prefix="Failed to fetch all themes metadata",
5702        )
5703
5704        try:
5705            resp = self._get_graphql(
5706                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5707            )
5708            data = resp["data"]["getMarketTrendsUniverses"]
5709        except Exception:
5710            raise BoostedAPIException("Failed to load market trends universes mapping")
5711        universe_id_to_name = {u["id"]: u["name"] for u in data}
5712
5713        outputs = []
5714        for theme in result["data"]["getAllThemesForUser"]:
5715            # map universe ID to universe ticker
5716            universe_tickers = []
5717            for universe_id in theme["universeIds"]:
5718                if universe_id in universe_id_to_name:  # don't support unlisted universes - skip
5719                    universe_name = universe_id_to_name[universe_id]
5720                    ticker = ThemeUniverse.get_ticker_from_name(universe_name)
5721                    if ticker:
5722                        universe_tickers.append(ticker)
5723
5724            outputs.append(
5725                {
5726                    "theme_id": theme["themeId"],
5727                    "theme_name": translate(theme["themeName"]),
5728                    "universes": universe_tickers,
5729                }
5730            )
5731
5732        return outputs
5733
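    # Usage sketch (editorial example, not part of the original source; the API
    # key is a placeholder):
    #
    #   client = BoostedClient(api_key="MY_API_KEY")
    #   for theme in client.get_all_theme_metadata():
    #       print(theme["theme_id"], theme["theme_name"], theme["universes"])
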
5734    def get_earnings_impacting_security(
5735        self,
5736        isin: str,
5737        currency: Optional[str] = None,
5738        country: Optional[str] = None,
5739        language: Optional[Union[str, Language]] = None,
5740    ) -> List[Dict[str, Any]]:
5741        translate = functools.partial(self.translate_text, language)
5742        date = datetime.date.today().strftime("%Y-%m-%d")
5743        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5744            ident_country_currency_dates=[
5745                DateIdentCountryCurrency(
5746                    date=date, identifier=isin, country=country, currency=currency
5747                )
5748            ]
5749        )
5750        try:
5751            gbi_id = company_data[0].gbi_id
5752        except Exception:
5753            raise BoostedAPIException(f"ISIN {isin} not found")
5754
5755        result = self._get_graphql(
5756            graphql_queries.EARNINGS_IMPACTS_CALENDAR_FOR_STOCK,
5757            variables={"date": date, "days": 180, "gbiId": gbi_id},
5758            error_msg_prefix="Failed to fetch earnings impacts data for stock",
5759        )
5760        earnings_events = result["data"]["earningsCalendarForStock"]
5761        output_events = []
5762        for event in earnings_events:
5763            if not event["impactedCompanies"]:
5764                continue
5765            fixed_event = {
5766                "event_date": event["eventDate"],
5767                "company_name": event["security"]["name"],
5768                "symbol": event["security"]["symbol"],
5769                "isin": event["security"]["isin"],
5770                "impact_reason": translate(event["impactedCompanies"][0]["reason"]),
5771            }
5772            output_events.append(fixed_event)
5773
5774        return output_events
5775
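    # Usage sketch (editorial example; placeholder key, illustrative ISIN):
    #
    #   client = BoostedClient(api_key="MY_API_KEY")
    #   for event in client.get_earnings_impacting_security(isin="US0378331005"):
    #       print(event["event_date"], event["symbol"], event["impact_reason"])
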
5776    def get_earnings_insights_for_stocks(
5777        self, isin: str, currency: Optional[str] = None, country: Optional[str] = None
5778    ) -> Dict[str, Any]:
5779        date = datetime.date.today().strftime("%Y-%m-%d")
5780        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5781            ident_country_currency_dates=[
5782                DateIdentCountryCurrency(
5783                    date=date, identifier=isin, country=country, currency=currency
5784                )
5785            ]
5786        )
5787        gbi_id_isin_map = {
5788            company.gbi_id: company.isin_info.identifier
5789            for company in company_data
5790            if company is not None
5791        }
5792        try:
5793            resp = self._get_graphql(
5794                query=graphql_queries.GET_EARNINGS_INSIGHTS_SUMMARIES,
5795                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5796            )
5797            # list of objects with gbi id and data
5798            summaries = resp["data"]["getEarningsSummaries"]
5799            resp = self._get_graphql(
5800                query=graphql_queries.GET_EARNINGS_COMPARISONS,
5801                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5802            )
5803            # list of objects with gbi id and data
5804            comparison = resp["data"]["getLatestEarningsChanges"]
5805        except Exception:
5806            raise BoostedAPIException("Failed to fetch earnings insights data")
5807
5808        if not summaries:
5809            raise BoostedAPIException(
5810                (
5811                    f"Failed to find earnings insights data for {isin}"
5812                    ", please try with another security"
5813                )
5814            )
5815
5816        output: Dict[str, Any] = {}
5817        reports = sorted(summaries[0]["reports"], key=lambda r: r["date"], reverse=True)
5818        current_report = reports[0]
5819
5820        def is_aligned_formatter(acc: Tuple[List, List], cur: Dict[str, Any]):
5821            if cur["isAligned"]:
5822                acc[0].append({k: cur[k] for k in cur if k != "isAligned"})
5823            else:
5824                acc[1].append({k: cur[k] for k in cur if k != "isAligned"})
5825            return acc
5826
5827        current_report_common_remarks: Union[List[Dict[str, Any]], List]
5828        current_report_dropped_remarks: Union[List[Dict[str, Any]], List]
5829        current_report_common_remarks, current_report_dropped_remarks = functools.reduce(
5830            is_aligned_formatter, current_report["details"], ([], [])
5831        )
5832        prev_report_common_remarks: Union[List[Dict[str, Any]], List]
5833        prev_report_new_remarks: Union[List[Dict[str, Any]], List]
5834        prev_report_common_remarks, prev_report_new_remarks = functools.reduce(
5835            is_aligned_formatter, reports[1]["details"], ([], [])
5836        )
5837
5838        output["earnings_report"] = {
5839            "release_date": datetime.datetime.strptime(current_report["date"], "%Y-%m-%d").date(),
5840            "quarter": current_report["quarter"],
5841            "year": current_report["year"],
5842            "details": [
5843                {
5844                    "header": detail_obj["header"],
5845                    "detail": detail_obj["detail"],
5846                    "sentiment": detail_obj["sentiment"],
5847                }
5848                for detail_obj in current_report["details"]
5849            ],
5850            "call_summary": current_report["highlights"],
5851            "common_remarks": current_report_common_remarks,
5852            "dropped_remarks": current_report_dropped_remarks,
5853            "qa_summary": current_report["qaHighlights"],
5854            "qa_details": current_report["qaDetails"],
5855        }
5856        prev_report = reports[1]
5857        output["prior_earnings_report"] = {
5858            "release_date": datetime.datetime.strptime(prev_report["date"], "%Y-%m-%d").date(),
5859            "quarter": prev_report["quarter"],
5860            "year": prev_report["year"],
5861            "details": [
5862                {
5863                    "header": detail_obj["header"],
5864                    "detail": detail_obj["detail"],
5865                    "sentiment": detail_obj["sentiment"],
5866                }
5867                for detail_obj in prev_report["details"]
5868            ],
5869            "call_summary": prev_report["highlights"],
5870            "common_remarks": prev_report_common_remarks,
5871            "new_remarks": prev_report_new_remarks,
5872            "qa_summary": prev_report["qaHighlights"],
5873            "qa_details": prev_report["qaDetails"],
5874        }
5875
5876        if not comparison:
5877            output["report_comparison"] = []
5878        else:
5879            output["report_comparison"] = comparison[0]["changes"]
5880
5881        return output
5882
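    # Usage sketch (editorial example; placeholder key, illustrative ISIN):
    #
    #   client = BoostedClient(api_key="MY_API_KEY")
    #   insights = client.get_earnings_insights_for_stocks(isin="US0378331005")
    #   report = insights["earnings_report"]
    #   print(report["release_date"], report["quarter"], report["year"])
    #   print(report["call_summary"])
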
5883    def get_portfolio_inference_status(self, portfolio_id: str, inference_date: str) -> dict:
5884        url = f"{self.base_uri}/api/inference/status/{portfolio_id}/{inference_date}"
5885        headers = {"Authorization": "ApiKey " + self.api_key}
5886        res = requests.get(url, headers=headers, **self._request_params)
5887
5888        if not res.ok:
5889            error_msg = self._try_extract_error_code(res)
5890            logger.error(error_msg)
5891            raise BoostedAPIException(
5892                f"Failed to get portfolio inference status, portfolio_id={portfolio_id}, "
5893                f"inference_date={inference_date}: {error_msg}"
5894            )
5895
5896        data = res.json()
5897        return data
5898
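    # Usage sketch (editorial example; placeholder IDs and key; the shape of the
    # returned JSON is not documented in this listing):
    #
    #   client = BoostedClient(api_key="MY_API_KEY")
    #   status = client.get_portfolio_inference_status("PORTFOLIO_ID", "2024-01-02")
    #   print(status)  # raw JSON payload from the inference status endpoint
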
5899    def delete_portfolios(self, model_to_portfolios: Dict[str, List[str]]) -> None:
5900        """
5901        Deletes a list of portfolios
5902
5903        Args:
5904            model_to_portfolios: Mapping from model_id -> list of corresponding portfolios to delete
5905        """
5906        for model_id, portfolios in model_to_portfolios.items():
5907            for portfolio_id in portfolios:
5908                url = self.base_uri + "/api/models/{0}/constraints/{1}/delete".format(
5909                    model_id, portfolio_id
5910                )
5911                headers = {"Authorization": "ApiKey " + self.api_key}
5912                res = requests.put(url, headers=headers, **self._request_params)
5913                if not res.ok:
5914                    error_msg = self._try_extract_error_code(res)
5915                    logger.error(error_msg)
5916                    raise BoostedAPIException("Failed to delete portfolios: {0}.".format(error_msg))
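
Usage sketch for delete_portfolios (editorial example; the key and all IDs are placeholders):

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    client.delete_portfolios({"MODEL_ID": ["PORTFOLIO_ID_1", "PORTFOLIO_ID_2"]})
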
BoostedClient( api_key, override_uri=None, debug=False, proxy=None, disable_verify_ssl=False)
 86    def __init__(
 87        self, api_key, override_uri=None, debug=False, proxy=None, disable_verify_ssl=False
 88    ):
 89        """
 90        Parameters
 91        ----------
 92        api_key: str
 93            Your API key provided by the Boosted application.  See your profile
 94            to generate a new key.
 95        proxy: str
 96            Your organization may require the use of a proxy for access.
 97            The address of a HTTPS proxy in the format of <address>:<port>.
 98            Examples are "123.456.789:123" or "my.proxy.com:123".
 99            Do not prepend with "https://".
100        disable_verify_ssl: bool
101            Your networking setup may be behind a firewall which performs SSL
102            inspection. Either set the REQUESTS_CA_BUNDLE environment variable
103            to point to the location of a custom certificate bundle, or set this
104            parameter to True to disable SSL verification as a workaround.
105        """
106        if override_uri is None:
107            self.base_uri = g_boosted_api_url
108        else:
109            self.base_uri = override_uri
110        self.api_key = api_key
111        self.debug = debug
112        self._request_params: Dict = {}
113        if debug:
114            logger.setLevel(logging.DEBUG)
115        else:
116            logger.setLevel(logging.INFO)
117        if proxy is not None:
118            self._request_params["proxies"] = {"https": proxy}
119        if disable_verify_ssl:
120            self._request_params["verify"] = False

def translate_text(self, language: Optional[Union[Language, str]], text: str) -> str:
240    def translate_text(self, language: Optional[Union[Language, str]], text: str) -> str:
241        if not language or language == Language.ENGLISH:
242            # By default, do not translate English
243            return text
244
245        params = {"text": text, "langCode": language}
246        url = self.base_uri + "/api/translate/translate-text"
247        headers = {"Authorization": "ApiKey " + self.api_key}
248        logger.info("Translating text...")
249        res = requests.post(url, json=params, headers=headers, **self._request_params)
250        try:
251            result = res.json()["translatedText"]
252        except Exception:
253            raise BoostedAPIException("Error translating text")
254        return result
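
Usage sketch (editorial example; the key is a placeholder, and "ja" is an illustrative language code — the set of codes accepted by the translate endpoint is not documented in this listing; a Language enum member may also be passed):

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    print(client.translate_text("ja", "Portfolio risk increased this quarter."))
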
def query_dataset(self, dataset_id):
256    def query_dataset(self, dataset_id):
257        url = self.base_uri + "/api/datasets/{0}".format(dataset_id)
258        headers = {"Authorization": "ApiKey " + self.api_key}
259        res = requests.get(url, headers=headers, **self._request_params)
260        if res.ok:
261            return res.json()
262        else:
263            error_msg = self._try_extract_error_code(res)
264            logger.error(error_msg)
265            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
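
Usage sketch (editorial example; placeholder key and dataset ID): the returned JSON includes at least the "type" field checked by the export helpers below.

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    info = client.query_dataset("DATASET_ID")
    print(info["type"])  # e.g. STOCK, STRATEGY, or GLOBAL
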
def query_namespace_dataset_id(self, namespace, data_type):
267    def query_namespace_dataset_id(self, namespace, data_type):
268        url = self.base_uri + f"/api/custom-security-dataset/{namespace}/{data_type}"
269        headers = {"Authorization": "ApiKey " + self.api_key}
270        res = requests.get(url, headers=headers, **self._request_params)
271        if res.ok:
272            return res.json()["result"]["id"]
273        else:
274            if res.status_code != 404:
275                error_msg = self._try_extract_error_code(res)
276                logger.error(error_msg)
277                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
278            else:
279                return None
def export_global_data(self, dataset_id, start=datetime.date.today() - timedelta(days=365 * 25), end=datetime.date.today(), timeout=600):
281    def export_global_data(
282        self,
283        dataset_id,
284        start=(datetime.date.today() - timedelta(days=365 * 25)),
285        end=datetime.date.today(),
286        timeout=600,
287    ):
288        query_info = self.query_dataset(dataset_id)
289        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
290            raise BoostedAPIException(
291                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
292            )
293        return self.export_data(dataset_id, start, end, timeout)
def export_independent_data(self, dataset_id, start=datetime.date.today() - timedelta(days=365 * 25), end=datetime.date.today(), timeout=600):
295    def export_independent_data(
296        self,
297        dataset_id,
298        start=(datetime.date.today() - timedelta(days=365 * 25)),
299        end=datetime.date.today(),
300        timeout=600,
301    ):
302        query_info = self.query_dataset(dataset_id)
303        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
304            raise BoostedAPIException(
305                f"Incorrect dataset type: {query_info['type']}"
306                f" - Expected {DataSetType.STRATEGY}"
307            )
308        return self.export_data(dataset_id, start, end, timeout)
def export_dependent_data(self, dataset_id, start=None, end=None, timeout=600):
310    def export_dependent_data(
311        self,
312        dataset_id,
313        start=None,
314        end=None,
315        timeout=600,
316    ):
317        query_info = self.query_dataset(dataset_id)
318        if DataSetType[query_info["type"]] != DataSetType.STOCK:
319            raise BoostedAPIException(
320                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
321            )
322
323        valid_date_range = self.getDatasetDates(dataset_id)
324        validStart = valid_date_range["validFrom"]
325        validEnd = valid_date_range["validTo"]
326
327        if start is None:
328            logger.info("Since no start date provided, starting from {0}.".format(validStart))
329            start = validStart
330        if end is None:
331            logger.info("Since no end date provided, ending at {0}.".format(validEnd))
332            end = validEnd
333        start = self.__to_date_obj(start)
334        end = self.__to_date_obj(end)
335        if start < validStart:
336            logger.info("Data does not exist before {0}.".format(validStart))
337            logger.info("Starting from {0}.".format(validStart))
338            start = validStart
339        if end > validEnd:
340            logger.info("Data does not exist after {0}.".format(validEnd))
341            logger.info("Ending at {0}.".format(validEnd))
342            end = validEnd
343        validate_start_and_end_dates(start, end)
344
345        logger.info("Data exists from {0} to {1}.".format(start, end))
346        request_url = "/api/datasets/" + dataset_id + "/export-data"
347        headers = {"Authorization": "ApiKey " + self.api_key}
348
349        data_chunks = []
350        chunk_size_days = 90
351        while start <= end:
352            chunk_end = start + timedelta(days=chunk_size_days)
353            if chunk_end > end:
354                chunk_end = end
355
356            logger.info("Requesting start={0} end={1}.".format(start, chunk_end))
357            params = {"start": self.__iso_format(start), "end": self.__iso_format(chunk_end)}
358            logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))
359
360            res = requests.get(
361                self.base_uri + request_url,
362                headers=headers,
363                params=params,
364                timeout=timeout,
365                **self._request_params,
366            )
367
368            if res.ok:
369                buf = io.StringIO(res.text)
370                df = pd.read_csv(buf, index_col=0, parse_dates=True)
371                if "price" in df.columns:
372                    df = df.drop("price", axis=1)
373                data_chunks.append(df)
374            else:
375                error_msg = self._try_extract_error_code(res)
376                logger.error(error_msg)
377                raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
378
379            start = start + timedelta(days=chunk_size_days + 1)
380
381        return pd.concat(data_chunks)
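
The 90-day chunking above walks inclusive windows: each request covers [start, start + 90 days] and the next window begins the day after the previous one ends. A standalone sketch of the date arithmetic (illustrative dates, editorial example):

    from datetime import date, timedelta

    start, end = date(2020, 1, 1), date(2020, 6, 30)
    chunk_size_days = 90
    while start <= end:
        chunk_end = min(start + timedelta(days=chunk_size_days), end)
        print(start, "->", chunk_end)  # one export request per inclusive window
        start = start + timedelta(days=chunk_size_days + 1)
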
def export_custom_security_data(self, dataset_id, start=date.today() - timedelta(days=365 * 25), end=date.today(), timeout=600):
383    def export_custom_security_data(
384        self,
385        dataset_id,
386        start=(date.today() - timedelta(days=365 * 25)),
387        end=date.today(),
388        timeout=600,
389    ):
390        query_info = self.query_dataset(dataset_id)
391        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
392            raise BoostedAPIException(
393                f"Incorrect dataset type: {query_info['type']}"
394                f" - Expected {DataSetType.SECURITIES_DAILY}"
395            )
396        return self.export_data(dataset_id, start, end, timeout)
def export_data(self, dataset_id, start=datetime.date.today() - timedelta(days=365 * 25), end=datetime.date.today(), timeout=600):
398    def export_data(
399        self,
400        dataset_id,
401        start=(datetime.date.today() - timedelta(days=365 * 25)),
402        end=datetime.date.today(),
403        timeout=600,
404    ):
405        logger.info("Requesting start={0} end={1}.".format(start, end))
406        request_url = "/api/datasets/" + dataset_id + "/export-data"
407        headers = {"Authorization": "ApiKey " + self.api_key}
408        start = self.__iso_format(start)
409        end = self.__iso_format(end)
410        params = {"start": start, "end": end}
411        logger.debug("URL={0}, headers={1}, params={2}".format(request_url, headers, params))
412        res = requests.get(
413            self.base_uri + request_url,
414            headers=headers,
415            params=params,
416            timeout=timeout,
417            **self._request_params,
418        )
419        if res.ok or self._check_status_code(res):
420            buf = io.StringIO(res.text)
421            df = pd.read_csv(buf, index_col=0, parse_dates=True)
422            if "price" in df.columns:
423                df = df.drop("price", axis=1)
424            return df
425        else:
426            error_msg = self._try_extract_error_code(res)
427            logger.error(error_msg)
428            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
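
Usage sketch (editorial example; placeholder key and dataset ID): export a date range as a date-indexed DataFrame.

    import datetime

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    df = client.export_data(
        "DATASET_ID",
        start=datetime.date(2023, 1, 1),
        end=datetime.date(2023, 12, 31),
    )
    print(df.head())
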
def get_inference(self, model_id, inference_date=datetime.date.today(), block=False, timeout_minutes=30):
445    def get_inference(
446        self, model_id, inference_date=datetime.date.today(), block=False, timeout_minutes=30
447    ):
448        start_time = datetime.datetime.now()
449        while True:
450            for numRetries in range(3):
451                res, status = self._get_inference(model_id, inference_date)
452                if res is not None:
453                    break
454                else:
455                    if status == Status.FAIL:
456                        return Status.FAIL
457                    logger.info("Retrying...")
458            if res is None:
459                logger.error("Max retries reached.  Request failed.")
460                return None
461
462            json_data = res.json()
463            if "result" in json_data.keys():
464                if json_data["result"]["status"] == "RUNNING":
465                    still_running = True
466                    if not block:
467                        logger.warning("Inference job is still running.")
468                        return None
469                    else:
470                        logger.info(
471                            "Inference job is still running.  Time elapsed={0}.".format(
472                                datetime.datetime.now() - start_time
473                            )
474                        )
475                        time.sleep(10)
476                else:
477                    still_running = False
478
479                if not still_running and json_data["result"]["status"] == "COMPLETE":
480                    csv = json_data["result"]["signals"]
481                    logger.info(json_data["result"])
482                    if self._check_status_code(res, isInference=True):
483                        logger.info(
484                            "Total run time = {0}.".format(datetime.datetime.now() - start_time)
485                        )
486                        return csv
487            else:
488                if "errors" in json_data.keys():
489                    logger.error(json_data["errors"])
490                else:
491                    logger.error("Error getting inference for date {0}.".format(inference_date))
492                return None
493            if (datetime.datetime.now() - start_time).total_seconds() / 60.0 > timeout_minutes:
494                logger.error("Timeout waiting for job completion.")
495                return None
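
Usage sketch (editorial example; placeholder key and model ID): block until the inference job completes and handle both failure returns.

    from boosted.api.api_client import BoostedClient
    from boosted.api.api_type import Status

    client = BoostedClient(api_key="MY_API_KEY")
    signals = client.get_inference("MODEL_ID", block=True, timeout_minutes=60)
    if signals in (None, Status.FAIL):
        print("Inference did not complete")
    else:
        print(signals)  # CSV payload from the COMPLETE branch above
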
def createDataset(self, schema):
497    def createDataset(self, schema):
498        request_url = "/api/datasets"
499        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
500        s = json.dumps(schema)
501        logger.info("Creating dataset with schema " + s)
502        res = requests.post(
503            self.base_uri + request_url, data=s, headers=headers, **self._request_params
504        )
505        if res.ok:
506            return res.json()["result"]
507        else:
508            raise BoostedAPIException("Dataset creation failed.")
def create_custom_namespace_dataset(self, namespace, schema):
510    def create_custom_namespace_dataset(self, namespace, schema):
511        request_url = f"/api/custom-security-dataset/{namespace}"
512        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
513        s = json.dumps(schema)
514        logger.info("Creating dataset with schema " + s)
515        res = requests.post(
516            self.base_uri + request_url, data=s, headers=headers, **self._request_params
517        )
518        if res.ok:
519            return res.json()["result"]
520        else:
521            raise BoostedAPIException("Dataset creation failed.")
def getUniverse(self, modelId, date=None):
523    def getUniverse(self, modelId, date=None):
524        if date is not None:
525            url = "/api/models/{0}/universe/{1}".format(modelId, self.__iso_format(date))
526            logger.info("Getting universe for date: {0}.".format(date))
527        else:
528            url = "/api/models/{0}/universe/".format(modelId)
529        headers = {"Authorization": "ApiKey " + self.api_key}
530        res = requests.get(self.base_uri + url, headers=headers, **self._request_params)
531        if res.ok:
532            buf = io.StringIO(res.text)
533            df = pd.read_csv(buf, index_col=0, parse_dates=True)
534            return df
535        else:
536            error = self._try_extract_error_code(res)
537            logger.error(
538                "There was a problem getting this universe or model ID: {0}.".format(error)
539            )
540            raise BoostedAPIException("Failed to get universe: {0}".format(error))
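
Usage sketch (editorial example; placeholder key and model ID): fetch a model's universe as a DataFrame, optionally as of a given date.

    import datetime

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    universe_df = client.getUniverse("MODEL_ID", date=datetime.date(2024, 1, 2))
    print(universe_df.head())
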
def add_custom_security_namespace_members( self, namespace, members: Union[pandas.core.frame.DataFrame, str]) -> Tuple[pandas.core.frame.DataFrame, str]:
542    def add_custom_security_namespace_members(
543        self, namespace, members: Union[pandas.DataFrame, str]
544    ) -> Tuple[pandas.DataFrame, str]:
545        url = self.base_uri + "/api/synthetic-datasets/{0}/generate".format(namespace)
546        headers = {"Authorization": "ApiKey " + self.api_key}
547        headers["Content-Type"] = "application/json"
548        logger.info("Adding custom security namespace for namespace: {0}".format(namespace))
549        strbuf = None
550        if isinstance(members, pandas.DataFrame):
551            df = members
552            df_canon = df.rename(columns={_: to_camel_case(_) for _ in df.columns})
553            canon_cols = ["Currency", "Symbol", "Country", "Name"]
554            if set(canon_cols).difference(df_canon.columns):
555                raise BoostedAPIException(f"Expected columns: {canon_cols}")
556            df_canon = df_canon.loc[:, canon_cols]
557            buf = io.StringIO()
558            df_canon.to_json(buf, orient="records")
559            strbuf = buf.getvalue()
560        elif isinstance(members, str):
561            strbuf = members
562        else:
563            raise BoostedAPIException(f"Unsupported members argument type: {type(members)}")
564        res = requests.post(url, data=strbuf, headers=headers, **self._request_params)
565        if res.ok:
566            res_obj = res.json()
567            res_df = pandas.Series(res_obj["generatedISIN"]).to_frame()
568            res_df.index.name = "Symbol"
569            res_df.columns = ["ISIN"]
570            logger.info("Add to custom security namespace successful.")
571            if "warnings" in res_obj:
572                logger.info("Warnings: {0}.".format(res.json()["warnings"]))
573                return res_df, res.json()["warnings"]
574            else:
575                return res_df, "No warnings."
576        else:
577            error_msg = self._try_extract_error_code(res)
578            raise BoostedAPIException("Failed to add custom security namespace members: {0}.".format(error_msg))
def updateUniverse(self, modelId, universe_df, date=datetime.date.today() + timedelta(1)):
580    def updateUniverse(self, modelId, universe_df, date=datetime.date.today() + timedelta(1)):
581        date = self.__iso_format(date)
582        url = self.base_uri + "/api/models/{0}/universe/{1}".format(modelId, date)
583        headers = {"Authorization": "ApiKey " + self.api_key}
584        logger.info("Updating universe for date {0}.".format(date))
585        if isinstance(universe_df, pd.core.frame.DataFrame):
586            buf = io.StringIO()
587            universe_df.to_csv(buf)
588            target = ("uploaded_universe.csv", buf.getvalue(), "text/csv")
589            files_req = {}
590            files_req["universe"] = target
591            res = requests.post(url, files=files_req, headers=headers, **self._request_params)
592        elif isinstance(universe_df, str):
593            target = ("uploaded_universe.csv", universe_df, "text/csv")
594            files_req = {}
595            files_req["universe"] = target
596            res = requests.post(url, files=files_req, headers=headers, **self._request_params)
597        else:
598            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
599        if res.ok:
600            logger.info("Universe update successful.")
601            if "warnings" in res.json():
602                logger.info("Warnings: {0}.".format(res.json()["warnings"]))
603                return res.json()["warnings"]
604            else:
605                return "No warnings."
606        else:
607            error_msg = self._try_extract_error_code(res)
608            raise BoostedAPIException("Failed to update universe: {0}.".format(error_msg))
def create_universe( self, universe: Union[pandas.core.frame.DataFrame, str], name: str, description: str) -> List[str]:
610    def create_universe(
611        self, universe: Union[pd.DataFrame, str], name: str, description: str
612    ) -> List[str]:
613        PRESENT = "PRESENT"
614        ANY = "ANY"
615        EARLIEST_DATE = "1900-01-01"
616        LATEST_DATE = "4000-01-01"
617
618        if isinstance(universe, (str, bytes, os.PathLike)):
619            universe = pd.read_csv(universe)
620
621        universe.columns = universe.columns.str.lower()
622
623        # Clients are free to leave out data. Fill in some defaults here.
624        if "from" not in universe.columns:
625            universe["from"] = EARLIEST_DATE
626        if "to" not in universe.columns:
627            universe["to"] = LATEST_DATE
628        if "currency" not in universe.columns:
629            universe["currency"] = ANY
630        if "country" not in universe.columns:
631            universe["country"] = ANY
632        if "isin" not in universe.columns:
633            universe["isin"] = None
634        if "symbol" not in universe.columns:
635            universe["symbol"] = None
636
637        # to prevent conflicts with python keywords
638        universe.rename(columns={"from": "from_date", "to": "to_date"}, inplace=True)
639
640        universe = universe.replace({np.nan: None})
641        security_country_currency_date_list = []
642        for i, r in enumerate(universe.itertuples()):
643            id_type = ColumnSubRole.ISIN
644            identifier = r.isin
645
646            if identifier is None:
647                id_type = ColumnSubRole.SYMBOL
648                identifier = str(r.symbol)
649
650            # if identifier is still None, it means that there is no ISIN or
651            # SYMBOL for this row, in which case we throw an error
652            if identifier is None:
653                raise BoostedAPIException(
654                    (
655                        f"Missing identifier in universe row {i + 1}:"
656                        " the row must contain an ISIN or Symbol"
657                    )
658                )
659
660            security_country_currency_date_list.append(
661                DateIdentCountryCurrency(
662                    date=r.from_date or EARLIEST_DATE,
663                    identifier=identifier,
664                    country=r.country or ANY,
665                    currency=r.currency or ANY,
666                    id_type=id_type,
667                )
668            )
669
670        gbi_id_objs = self.getGbiIdFromIdentCountryCurrencyDate(security_country_currency_date_list)
671
672        security_list = []
673        for i, r in enumerate(universe.itertuples()):
674            # if we have a None here, we failed to map to a gbi id
675            if gbi_id_objs[i] is None:
676                raise BoostedAPIException(f"Unable to map row: {tuple(r)}")
677
678            security_list.append(
679                {
680                    "stockId": gbi_id_objs[i].gbi_id,
681                    "fromZ": r.from_date or EARLIEST_DATE,
682                    "toZ": LATEST_DATE if r.to_date in (PRESENT, None) else r.to_date,
683                    "removal": False,
684                    "source": "UPLOAD",
685                }
686            )
687
688        url = self.base_uri + "/api/template-universe/save"
689        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
690        req = {"name": name, "description": description, "modificationDaos": security_list}
691
692        res = requests.post(url, json=req, headers=headers, **self._request_params)
693        self._check_ok_or_err_with_msg(res, "Failed to create universe")
694
695        if "warnings" in res.json():
696            logger.info("Warnings: {0}.".format(res.json()["warnings"]))
697            return res.json()["warnings"].splitlines()
698        else:
699            return []
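
Usage sketch (editorial example; placeholder key, illustrative ISINs): omitted columns ("from", "to", "currency", "country", "symbol") are filled with the defaults above.

    import pandas as pd

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    universe = pd.DataFrame({"isin": ["US0378331005", "US5949181045"]})
    upload_warnings = client.create_universe(
        universe, name="Example Universe", description="Uploaded via the API"
    )
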
def validate_dataframe(self, df):
701    def validate_dataframe(self, df):
702        if not isinstance(df, pd.core.frame.DataFrame):
703            logger.error("Dataset must be of type Dataframe.")
704            return False
705        if not isinstance(df.index, pd.DatetimeIndex):
706            logger.error("Index must be DatetimeIndex.")
707            return False
708        if len(df.columns) == 0:
709            logger.error("No feature columns exist.")
710            return False
711        if len(df) == 0:
712            logger.error("No rows exist.")
713            return False
714        return True
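
A DataFrame passes the checks above when it has a DatetimeIndex, at least one column, and at least one row. A minimal sketch (editorial example; the placeholder key is never used because validate_dataframe makes no network call):

    import pandas as pd

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    df = pd.DataFrame(
        {"feature_1": [0.1, 0.2]},
        index=pd.to_datetime(["2024-01-01", "2024-01-02"]),
    )
    assert client.validate_dataframe(df)
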
def get_dataset_schema(self, dataset_id):
715    def get_dataset_schema(self, dataset_id):
716        url = self.base_uri + "/api/datasets/{0}/schema".format(dataset_id)
717        headers = {"Authorization": "ApiKey " + self.api_key}
718        res = requests.get(url, headers=headers, **self._request_params)
719        if res.ok:
720            json_schema = res.json()
721        else:
722            error_msg = self._try_extract_error_code(res)
723            logger.error(error_msg)
724            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
725        return DataSetConfig.fromDict(json_schema["result"])
def add_custom_security_daily_dataset(self, namespace, dataset, schema=None, timeout=600, block=True):
727    def add_custom_security_daily_dataset(
728        self, namespace, dataset, schema=None, timeout=600, block=True
729    ):
730        result = self.add_custom_security_daily_dataset_with_warnings(
731            namespace, dataset, schema, timeout, block
732        )
733        return result["dataset_id"]
def add_custom_security_daily_dataset_with_warnings( self, namespace, dataset, schema=None, timeout=600, block=True, no_exception_on_chunk_error=False):
735    def add_custom_security_daily_dataset_with_warnings(
736        self,
737        namespace,
738        dataset,
739        schema=None,
740        timeout=600,
741        block=True,
742        no_exception_on_chunk_error=False,
743    ):
744        dataset_type = DataSetType.SECURITIES_DAILY
745        dsid = self.query_namespace_dataset_id(namespace, dataset_type)
746
747        if not self.validate_dataframe(dataset):
748            logger.error("dataset failed validation.")
749            return None
750
751        if dsid is None:
752            # create the dataset if not exist.
753            schema = infer_dataset_schema(
754                "custom_security_daily", dataset, dataset_type, infer_from_column_names=True
755            )
756            dsid = self.create_custom_namespace_dataset(namespace, schema.toDict())
757            data_type = DataAddType.CREATION
758        elif schema is not None:
759            raise ValueError(
760                f"Dataset schema already exists for namespace={namespace}, type={dataset_type}"
761                ", cannot create another!"
762            )
763        else:
764            data_type = DataAddType.HISTORICAL
765
766        logger.info("Uploading to dataset with ID = {0}...".format(dsid))
767        result = self.add_custom_security_daily_data(
768            dsid,
769            dataset,
770            timeout,
771            block,
772            data_type=data_type,
773            no_exception_on_chunk_error=no_exception_on_chunk_error,
774        )
775        return {
776            "namespace": namespace,
777            "dataset_id": dsid,
778            "warnings": result["warnings"],
779            "errors": result["errors"],
780        }
def add_custom_security_daily_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=DataAddType.HISTORICAL, no_exception_on_chunk_error=False):
782    def add_custom_security_daily_data(
783        self,
784        dataset_id,
785        csv_data,
786        timeout=600,
787        block=True,
788        data_type=DataAddType.HISTORICAL,
789        no_exception_on_chunk_error=False,
790    ):
791        warnings = []
792        query_info = self.query_dataset(dataset_id)
793        if DataSetType[query_info["type"]] != DataSetType.SECURITIES_DAILY:
794            raise BoostedAPIException(
795                f"Incorrect dataset type: {query_info['type']}"
796                f" - Expected {DataSetType.SECURITIES_DAILY}"
797            )
798        warnings, errors = self.setup_chunk_and_upload_data(
799            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
800        )
801        if len(warnings) > 0:
802            logger.warning(
803                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
804            )
805        if len(errors) > 0:
806            raise BoostedAPIException(
807                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
808                + "\n".join(errors)
809            )
810        return {"warnings": warnings, "errors": errors}
def add_dependent_dataset( self, dataset, datasetName='DependentDataset', schema=None, timeout=600, block=True):
812    def add_dependent_dataset(
813        self, dataset, datasetName="DependentDataset", schema=None, timeout=600, block=True
814    ):
815        result = self.add_dependent_dataset_with_warnings(
816            dataset, datasetName, schema, timeout, block
817        )
818        return result["dataset_id"]
def add_dependent_dataset_with_warnings( self, dataset, datasetName='DependentDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False):
820    def add_dependent_dataset_with_warnings(
821        self,
822        dataset,
823        datasetName="DependentDataset",
824        schema=None,
825        timeout=600,
826        block=True,
827        no_exception_on_chunk_error=False,
828    ):
829        if not self.validate_dataframe(dataset):
830            logger.error("dataset failed validation.")
831            return None
832        if schema is None:
833            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STOCK)
834        dsid = self.createDataset(schema.toDict())
835        logger.info("Creating dataset with ID = {0}.".format(dsid))
836        result = self.add_dependent_data(
837            dsid,
838            dataset,
839            timeout,
840            block,
841            data_type=DataAddType.CREATION,
842            no_exception_on_chunk_error=no_exception_on_chunk_error,
843        )
844        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
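
Usage sketch (editorial example; placeholder key): the DataFrame must pass validate_dataframe, and its columns must let infer_dataset_schema derive a STOCK schema (the exact identifier columns it expects are not shown in this listing).

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    # df: a date-indexed DataFrame prepared as in the validate_dataframe example above
    dataset_id = client.add_dependent_dataset(df, datasetName="MyStockDataset")
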
def add_independent_dataset( self, dataset, datasetName='IndependentDataset', schema=None, timeout=600, block=True):
846    def add_independent_dataset(
847        self, dataset, datasetName="IndependentDataset", schema=None, timeout=600, block=True
848    ):
849        result = self.add_independent_dataset_with_warnings(
850            dataset, datasetName, schema, timeout, block
851        )
852        return result["dataset_id"]
def add_independent_dataset_with_warnings( self, dataset, datasetName='IndependentDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False):
854    def add_independent_dataset_with_warnings(
855        self,
856        dataset,
857        datasetName="IndependentDataset",
858        schema=None,
859        timeout=600,
860        block=True,
861        no_exception_on_chunk_error=False,
862    ):
863        if not self.validate_dataframe(dataset):
864            logger.error("dataset failed validation.")
865            return None
866        if schema is None:
867            schema = infer_dataset_schema(datasetName, dataset, DataSetType.STRATEGY)
868        schemaDict = schema.toDict()
869        if "configurationDataJson" not in schemaDict:
870            schemaDict["configurationDataJson"] = "{}"
871        dsid = self.createDataset(schemaDict)
872        logger.info("Creating dataset with ID = {0}.".format(dsid))
873        result = self.add_independent_data(
874            dsid,
875            dataset,
876            timeout,
877            block,
878            data_type=DataAddType.CREATION,
879            no_exception_on_chunk_error=no_exception_on_chunk_error,
880        )
881        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
def add_global_dataset( self, dataset, datasetName='GlobalDataset', schema=None, timeout=600, block=True):
883    def add_global_dataset(
884        self, dataset, datasetName="GlobalDataset", schema=None, timeout=600, block=True
885    ):
886        result = self.add_global_dataset_with_warnings(dataset, datasetName, schema, timeout, block)
887        return result["dataset_id"]
def add_global_dataset_with_warnings( self, dataset, datasetName='GlobalDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False):
889    def add_global_dataset_with_warnings(
890        self,
891        dataset,
892        datasetName="GlobalDataset",
893        schema=None,
894        timeout=600,
895        block=True,
896        no_exception_on_chunk_error=False,
897    ):
898        if not self.validate_dataframe(dataset):
899            logger.error("dataset failed validation.")
900            return None
901        if schema is None:
902            schema = infer_dataset_schema(datasetName, dataset, DataSetType.GLOBAL)
903        dsid = self.createDataset(schema.toDict())
904        logger.info("Creating dataset with ID = {0}.".format(dsid))
905        result = self.add_global_data(
906            dsid,
907            dataset,
908            timeout,
909            block,
910            data_type=DataAddType.CREATION,
911            no_exception_on_chunk_error=no_exception_on_chunk_error,
912        )
913        return {"dataset_id": dsid, "warnings": result["warnings"], "errors": result["errors"]}
def add_independent_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=DataAddType.HISTORICAL, no_exception_on_chunk_error=False):
915    def add_independent_data(
916        self,
917        dataset_id,
918        csv_data,
919        timeout=600,
920        block=True,
921        data_type=DataAddType.HISTORICAL,
922        no_exception_on_chunk_error=False,
923    ):
924        query_info = self.query_dataset(dataset_id)
925        if DataSetType[query_info["type"]] != DataSetType.STRATEGY:
926            raise BoostedAPIException(
927                f"Incorrect dataset type: {query_info['type']}"
928                f" - Expected {DataSetType.STRATEGY}"
929            )
930        warnings, errors = self.setup_chunk_and_upload_data(
931            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
932        )
933        if len(warnings) > 0:
934            logger.warning(
935                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
936            )
937        if len(errors) > 0:
938            raise BoostedAPIException(
939                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
940                + "\n".join(errors)
941            )
942        return {"warnings": warnings, "errors": errors}
def add_dependent_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=DataAddType.HISTORICAL, no_exception_on_chunk_error=False):
944    def add_dependent_data(
945        self,
946        dataset_id,
947        csv_data,
948        timeout=600,
949        block=True,
950        data_type=DataAddType.HISTORICAL,
951        no_exception_on_chunk_error=False,
952    ):
953        warnings = []
954        query_info = self.query_dataset(dataset_id)
955        if DataSetType[query_info["type"]] != DataSetType.STOCK:
956            raise BoostedAPIException(
957                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.STOCK}"
958            )
959        warnings, errors = self.setup_chunk_and_upload_data(
960            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
961        )
962        if len(warnings) > 0:
963            logger.warning(
964                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
965            )
966        if len(errors) > 0:
967            raise BoostedAPIException(
968                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
969                + "\n".join(errors)
970            )
971        return {"warnings": warnings, "errors": errors}
def add_global_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=DataAddType.HISTORICAL, no_exception_on_chunk_error=False):
973    def add_global_data(
974        self,
975        dataset_id,
976        csv_data,
977        timeout=600,
978        block=True,
979        data_type=DataAddType.HISTORICAL,
980        no_exception_on_chunk_error=False,
981    ):
982        query_info = self.query_dataset(dataset_id)
983        if DataSetType[query_info["type"]] != DataSetType.GLOBAL:
984            raise BoostedAPIException(
985                f"Incorrect dataset type: {query_info['type']}" f" - Expected {DataSetType.GLOBAL}"
986            )
987        warnings, errors = self.setup_chunk_and_upload_data(
988            dataset_id, csv_data, data_type, timeout, block, no_exception_on_chunk_error
989        )
990        if len(warnings) > 0:
991            logger.warning(
992                "Encountered {0} total warnings while uploading dataset.".format(len(warnings))
993            )
994        if len(errors) > 0:
995            raise BoostedAPIException(
996                "Encountered {0} total ERRORS while uploading dataset:\n".format(len(errors))
997                + "\n".join(errors)
998            )
999        return {"warnings": warnings, "errors": errors}
def get_csv_buffer(self):
1001    def get_csv_buffer(self):
1002        return io.StringIO()
def start_chunked_upload(self, dataset_id):
1004    def start_chunked_upload(self, dataset_id):
1005        url = self.base_uri + "/api/datasets/{0}/start-chunked-upload".format(dataset_id)
1006        headers = {"Authorization": "ApiKey " + self.api_key}
1007        res = requests.post(url, headers=headers, **self._request_params)
1008        if res.ok:
1009            return res.json()["result"]
1010        else:
1011            error_msg = self._try_extract_error_code(res)
1012            logger.error(error_msg)
1013            raise BoostedAPIException(
1014                "Failed to obtain dataset lock for upload: {0}.".format(error_msg)
1015            )
def abort_chunked_upload(self, dataset_id, chunk_id):
1017    def abort_chunked_upload(self, dataset_id, chunk_id):
1018        url = self.base_uri + "/api/datasets/{0}/abort-chunked-upload".format(dataset_id)
1019        headers = {"Authorization": "ApiKey " + self.api_key}
1020        params = {"uploadGroupId": chunk_id}
1021        res = requests.post(url, headers=headers, **self._request_params, params=params)
1022        if not res.ok:
1023            error_msg = self._try_extract_error_code(res)
1024            logger.error(error_msg)
1025            raise BoostedAPIException(
1026                "Failed to abort dataset lock during error: {0}.".format(error_msg)
1027            )
def check_dataset_ingestion_completion(self, dataset_id, chunk_id, start_time):
1029    def check_dataset_ingestion_completion(self, dataset_id, chunk_id, start_time):
1030        url = self.base_uri + "/api/datasets/{0}/upload-chunk-status".format(dataset_id)
1031        headers = {"Authorization": "ApiKey " + self.api_key}
1032        params = {"uploadGroupId": chunk_id}
1033        res = requests.get(url, headers=headers, **self._request_params, params=params)
1034        res = res.json()
1035
1036        finished = False
1037        warnings = []
1038        errors = []
1039
1040        if isinstance(res, dict):
1041            dataset_status = res["datasetStatus"]
1042            chunk_status = res["chunkStatus"]
1043            if chunk_status != ChunkStatus.PROCESSING.value:
1044                finished = True
1045                errors = res["errors"]
1046                warnings = res["warnings"]
1047                successful_rows = res["successfulRows"]
1048                total_rows = res["totalRows"]
1049                logger.info(
1050                    f"Successfully ingested {successful_rows} out of {total_rows} uploaded rows."
1051                )
1052                if chunk_status in [
1053                    ChunkStatus.SUCCESS.value,
1054                    ChunkStatus.WARNING.value,
1055                    ChunkStatus.ERROR.value,
1056                ]:
1057                    if dataset_status != "AVAILABLE":
1058                        raise BoostedAPIException(
1059                            "Dataset was unexpectedly unavailable after chunk upload finished."
1060                        )
1061                    else:
1062                        logger.info("Ingestion complete.  Uploaded data is ready for use.")
1063                elif chunk_status == ChunkStatus.ABORTED.value:
1064                    errors.append(
1065                        "Dataset chunk upload was aborted by server! Upload did not succeed."
1066                    )
1067                else:
1068                    errors.append("Unexpected data ingestion status: {0}.".format(chunk_status))
1069            if not finished:
1070                elapsed = datetime.datetime.now() - start_time
1071                logger.info(
1072                    "Data ingestion still running.  Time elapsed={0}.".format(elapsed)
1073                )
1074        else:
1075            raise BoostedAPIException("Unable to get status of dataset ingestion.")
1076        return {"finished": finished, "warnings": warnings, "errors": errors}
def setup_chunk_and_upload_data( self, dataset_id, csv_data, data_type, timeout=600, block=True, no_exception_on_chunk_error=False):
1112    def setup_chunk_and_upload_data(
1113        self,
1114        dataset_id,
1115        csv_data,
1116        data_type,
1117        timeout=600,
1118        block=True,
1119        no_exception_on_chunk_error=False,
1120    ):
1121        chunk_id = self.start_chunked_upload(dataset_id)
1122        logger.info("Obtained lock on dataset for upload: " + chunk_id)
1123        try:
1124            warnings, errors = self.chunk_and_upload_data(
1125                dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
1126            )
1127            commit_warnings, commit_errors = self._commit_chunked_upload(
1128                dataset_id, chunk_id, data_type, block, timeout
1129            )
1130            return warnings + commit_warnings, errors + commit_errors
1131        except Exception:
1132            self.abort_chunked_upload(dataset_id, chunk_id)
1133            raise
def chunk_and_upload_data( self, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False):
1135    def chunk_and_upload_data(
1136        self, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False
1137    ):
1138        if isinstance(csv_data, pd.core.frame.DataFrame):
1139            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
1140                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")
1141
1142            warnings = []
1143            errors = []
1144            logger.info("Uploading yearly.")
1145            for t in csv_data.index.to_period("Y").unique():
1146                if t is pd.NaT:
1147                    continue
1148
1149                # serialize it to a string
1150                buf = self.get_csv_buffer()
1151                yearly_csv = csv_data.loc[str(t)]
1152                yearly_csv.to_csv(buf, header=True)
1153                raw_csv = buf.getvalue()
1154
1155                # we are already chunking yearly... but if the csv still exceeds a healthy
1156                # limit of 50mb the final line of defence is to ignore date boundaries and
1157                # just chunk the rows. This is mostly for the cloudflare upload limit.
1158                size_lim = 50 * 1000 * 1000
1159                est_csv_size = sys.getsizeof(raw_csv)
1160                if est_csv_size > size_lim:
1161                    del raw_csv, buf
1162                    logger.info("Yearly data too large for single upload, chunking further...")
1163                    chunks = []
1164                    nchunks = math.ceil(est_csv_size / size_lim)
1165                    rows_per_chunk = math.ceil(len(yearly_csv) / nchunks)
1166                    for i in range(0, len(yearly_csv), rows_per_chunk):
1167                        buf = self.get_csv_buffer()
1168                        split_csv = yearly_csv.iloc[i : i + rows_per_chunk]
1169                        split_csv.to_csv(buf, header=True)
1170                        split_csv = buf.getvalue()
1171                        chunks.append(
1172                            (
1173                                "{0}-{1}".format(i + 1, min(len(yearly_csv), i + rows_per_chunk)),
1174                                split_csv,
1175                            )
1176                        )
1177                else:
1178                    chunks = [("all", raw_csv)]
1179
1180                for i, (rows_descriptor, chunk_csv) in enumerate(chunks):
1181                    chunk_descriptor = "{0} in yearly chunk {1}".format(rows_descriptor, t)
1182                    logger.info(
1183                        "Uploading rows:"
1184                        + chunk_descriptor
1185                        + " (chunk {0} of {1}):".format(i + 1, len(chunks))
1186                    )
1187                    _, new_warnings, new_errors = self.upload_dataset_chunk(
1188                        chunk_descriptor,
1189                        dataset_id,
1190                        chunk_id,
1191                        chunk_csv,
1192                        timeout,
1193                        no_exception_on_chunk_error,
1194                    )
1195                    warnings.extend(new_warnings)
1196                    errors.extend(new_errors)
1197            return warnings, errors
1198
1199        elif isinstance(csv_data, str):
1200            _, warnings, errors = self.upload_dataset_chunk(
1201                "all data", dataset_id, chunk_id, csv_data, timeout, no_exception_on_chunk_error
1202            )
1203            return warnings, errors
1204        else:
1205            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
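
Worked example of the size-based fallback above (editorial sketch with illustrative numbers): a yearly CSV over the 50 MB limit is split by row count, not by date.

    import math

    est_csv_size = 120_000_000  # illustrative: ~120 MB serialized CSV
    size_lim = 50 * 1000 * 1000  # the 50 MB limit used above
    n_rows = 1_000_000
    nchunks = math.ceil(est_csv_size / size_lim)  # 3 chunks
    rows_per_chunk = math.ceil(n_rows / nchunks)  # 333334 rows per chunk
    print(nchunks, rows_per_chunk)
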
def upload_dataset_chunk( self, chunk_descriptor, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False):
1207    def upload_dataset_chunk(
1208        self,
1209        chunk_descriptor,
1210        dataset_id,
1211        chunk_id,
1212        csv_data,
1213        timeout=600,
1214        no_exception_on_chunk_error=False,
1215    ):
1216        logger.info("Starting upload: " + chunk_descriptor)
1217        url = self.base_uri + "/api/datasets/{0}/upload-dataset-chunk".format(dataset_id)
1218        headers = {"Authorization": "ApiKey " + self.api_key}
1219        files_req = {}
1220        warnings = []
1221        errors = []
1222
1223        # make the network request
1224        target = ("uploaded_data.csv", csv_data, "text/csv")
1225        files_req["dataFile"] = target
1226        params = {"uploadGroupId": chunk_id}
1227        res = requests.post(
1228            url,
1229            params=params,
1230            files=files_req,
1231            headers=headers,
1232            timeout=timeout,
1233            **self._request_params,
1234        )
1235
1236        if res.ok:
1237            logger.info(
1238                (
1239                    "Chunk upload completed.  "
1240                    "Ingestion started.  "
1241                    "Please wait until the data is in AVAILABLE state."
1242                )
1243            )
1244            if "warnings" in res.json():
1245                warnings = res.json()["warnings"]
1246                if len(warnings) > 0:
1247                    logger.warning("Uploaded chunk encountered data warnings: ")
1248                for w in warnings:
1249                    logger.warning(w)
1250        else:
1251            reason = "Upload failed: {0}, {1}".format(res.text, res.reason)
1252            logger.error(reason)
1253            if no_exception_on_chunk_error:
1254                errors.append(
1255                    "Chunk {0} failed: {1}. ".format(chunk_descriptor, reason)
1256                    + "Your data was only PARTIALLY uploaded. "
1257                    + "Please reattempt the upload of this chunk."
1258                )
1259            else:
1260                raise BoostedAPIException(reason)
1261
1262        return res, warnings, errors
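
Usage sketch (not part of the source): upload a single CSV chunk and collect warnings and errors rather than raising. The API key, dataset ID, chunk ID and row data below are placeholders, and later sketches in this section reuse this client object.

    from boosted.api.api_client import BoostedClient

    client = BoostedClient(api_key="MY_API_KEY")
    csv_chunk = "2023-01-03,US0378331005,1.0\n"  # placeholder rows matching your dataset schema
    res, warnings, errors = client.upload_dataset_chunk(
        "rows 1-1",                # chunk_descriptor, used only for logging
        "my-dataset-id",
        "my-chunk-id",             # uploadGroupId tying related chunks together
        csv_chunk,
        no_exception_on_chunk_error=True,  # collect errors instead of raising
    )
    for message in warnings + errors:
        print(message)
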
1264    def getAllocationsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1265        date = self.__iso_format(date)
1266        endpoint = "latest-allocations" if rollback_to_last_available_date else "allocations"
1267        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1268        headers = {"Authorization": "ApiKey " + self.api_key}
1269        params = {"date": date}
1270        logger.info("Retrieving allocations information for date {0}.".format(date))
1271        res = requests.get(url, params=params, headers=headers, **self._request_params)
1272        if res.ok:
1273            logger.info("Allocations retrieval successful.")
1274            return res.json()
1275        else:
1276            error_msg = self._try_extract_error_code(res)
1277            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
1280    def getAllocationsForDateV2(self, portfolio_id, date, rollback_to_last_available_date):
1281        date = self.__iso_format(date)
1282        endpoint = "latest-allocations-v2" if rollback_to_last_available_date else "allocations-v2"
1283        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1284        headers = {"Authorization": "ApiKey " + self.api_key}
1285        params = {"date": date}
1286        logger.info("Retrieving allocations information for date {0}.".format(date))
1287        res = requests.get(url, params=params, headers=headers, **self._request_params)
1288        if res.ok:
1289            logger.info("Allocations retrieval successful.")
1290            return res.json()
1291        else:
1292            error_msg = self._try_extract_error_code(res)
1293            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
1295    def getAllocationsByDates(self, portfolio_id, dates=None):
1296        url = self.base_uri + "/api/portfolios/{0}/allocationsByDate".format(portfolio_id)
1297        headers = {"Authorization": "ApiKey " + self.api_key}
1298        if dates is not None:
1299            fmt_dates = []
1300            for d in dates:
1301                fmt_dates.append(self.__iso_format(d))
1302            fmt_dates_str = ",".join(fmt_dates)
1303            params: Dict = {"dates": fmt_dates_str}
1304            logger.info("Retrieving allocations information for dates {0}.".format(fmt_dates))
1305        else:
1306            params = {"dates": None}
1307            logger.info("Retrieving allocations information for all dates")
1308        res = requests.get(url, params=params, headers=headers, **self._request_params)
1309        if res.ok:
1310            logger.info("Allocations retrieval successful.")
1311            return res.json()
1312        else:
1313            error_msg = self._try_extract_error_code(res)
1314            raise BoostedAPIException("Failed to retrieve allocations: {0}.".format(error_msg))
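
Usage sketch with placeholder IDs and dates: rollback_to_last_available_date switches to the latest-allocations endpoint when the requested date has no data, and getAllocationsByDates accepts an optional list of dates.

    allocations = client.getAllocationsForDate(
        "my-portfolio-id", "2023-06-30", rollback_to_last_available_date=True
    )
    by_dates = client.getAllocationsByDates(
        "my-portfolio-id", dates=["2023-06-29", "2023-06-30"]
    )
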
1316    def getSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1317        date = self.__iso_format(date)
1318        endpoint = "latest-signals" if rollback_to_last_available_date else "signals"
1319        url = self.base_uri + "/api/portfolios/{0}/{1}".format(portfolio_id, endpoint)
1320        headers = {"Authorization": "ApiKey " + self.api_key}
1321        params = {"date": date}
1322        logger.info("Retrieving signals information for date {0}.".format(date))
1323        res = requests.get(url, params=params, headers=headers, **self._request_params)
1324        if res.ok:
1325            logger.info("Signals retrieval successful.")
1326            return res.json()
1327        else:
1328            error_msg = self._try_extract_error_code(res)
1329            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1331    def getSignalsForAllDates(self, portfolio_id, dates=None):
1332        url = self.base_uri + "/api/portfolios/{0}/signalsByDate".format(portfolio_id)
1333        headers = {"Authorization": "ApiKey " + self.api_key}
1334        params = {}
1335        if dates is not None:
1336            fmt_dates = []
1337            for d in dates:
1338                fmt_dates.append(self.__iso_format(d))
1339            fmt_dates_str = ",".join(fmt_dates)
1340            params = {"dates": fmt_dates_str}
1341            logger.info("Retrieving signals information for dates {0}.".format(fmt_dates))
1342        else:
1343            params = {"dates": None}
1344            logger.info("Retrieving signals information for all dates")
1345        res = requests.get(url, params=params, headers=headers, **self._request_params)
1346        if res.ok:
1347            logger.info("Signals retrieval successful.")
1348            return res.json()
1349        else:
1350            error_msg = self._try_extract_error_code(res)
1351            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1353    def getEquityAccuracy(
1354        self,
1355        model_id: str,
1356        portfolio_id: str,
1357        tickers: List[str],
1358        start_date: Optional[BoostedDate] = None,
1359        end_date: Optional[BoostedDate] = None,
1360    ) -> Dict[str, Dict[str, Any]]:
1361        data: Dict[str, Any] = {}
1362        if start_date is not None:
1363            start_date = convert_date(start_date)
1364            data["startDate"] = start_date.isoformat()
1365        if end_date is not None:
1366            end_date = convert_date(end_date)
1367            data["endDate"] = end_date.isoformat()
1368
1369        if start_date and end_date:
1370            validate_start_and_end_dates(start_date, end_date)
1371
1372        tickers_stream = ",".join(tickers)
1373        data["tickers"] = tickers_stream
1374        data["timestamp"] = time.strftime("%H:%M:%S")
1375        data["shouldRecalc"] = True
1376        url = self.base_uri + f"/api/analysis/equity-accuracy/{model_id}/{portfolio_id}"
1377        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1378
1379        logger.info(
1380            f"Retrieving equity accuracy data for date range {start_date} to {end_date} "
1381            f"for tickers: {tickers}."
1382        )
1383
1384        # Metric names used below to build dataframes from the JSON output.
1385        metrics = [
1386            "hit_rate_mean",
1387            "hit_rate_median",
1388            "excess_return_mean",
1389            "excess_return_median",
1390            "return",
1391            "excess_return",
1392        ]
1393
1394        # send the request, retry if failed
1395        MAX_RETRIES = 10  # maximum number of retries before giving up
1396        SLEEP_TIME = 3  # waiting time between requests
1397
1398        num_retries = 0
1399        success = False
1400        while not success and num_retries < MAX_RETRIES:
1401            res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
1402            if res.ok:
1403                logger.info("Equity Accuracy Data retrieval successful.")
1404                info = res.json()
1405                success = True
1406            else:
1407                data["shouldRecalc"] = False
1408                num_retries += 1
1409                time.sleep(SLEEP_TIME)
1410
1411        if not success:
1412            raise BoostedAPIException("Failed to retrieve equity accuracy: Request timeout.")
1413
1414        for ticker, accuracy_data in info.items():
1415            for metric in metrics:
1416                metric_matrix = accuracy_data[metric]
1417                if not isinstance(metric_matrix, str):
1418                    # Set the index to the quintile label, and remove it from the data
1419                    index = []
1420                    for row in metric_matrix[1:]:
1421                        index.append(row.pop(0))
1422
1423                    # columns are "1D", "5D", etc.
1424                    df = pd.DataFrame(metric_matrix[1:], columns=metric_matrix[0][1:], index=index)
1425                    accuracy_data[metric] = df
1426        return info
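
Usage sketch with placeholder IDs and tickers, assuming the server keys results by ticker as passed. Each metric returned as a matrix is converted to a pandas DataFrame whose rows are quintile labels and whose columns are horizons such as "1D" and "5D"; metrics returned as strings are left untouched.

    accuracy = client.getEquityAccuracy(
        "my-model-id",
        "my-portfolio-id",
        ["AAPL", "MSFT"],
        start_date="2023-01-01",
        end_date="2023-12-31",
    )
    hit_rate = accuracy["AAPL"]["hit_rate_mean"]
    if not isinstance(hit_rate, str):  # string values indicate unavailable data
        print(hit_rate)
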
1428    def getHistoricalTradeDates(self, portfolio_id, start_date=None, end_date=None):
1429        end_date = self.__to_date_obj(end_date or datetime.date.today())
1430        start_date = self.__iso_format(start_date or (end_date - timedelta(days=365)))
1431        end_date = self.__iso_format(end_date)
1432
1433        url = self.base_uri + "/api/portfolios/{0}/tradingDates".format(portfolio_id)
1434        headers = {"Authorization": "ApiKey " + self.api_key}
1435        params = {"startDate": start_date, "endDate": end_date}
1436
1437        logger.info(
1438            "Retrieving historical trade dates data for date range {0} to {1}.".format(
1439                start_date, end_date
1440            )
1441        )
1442        res = requests.get(url, params=params, headers=headers, **self._request_params)
1443        if res.ok:
1444            logger.info("Trading dates retrieval successful.")
1445            return res.json()["dates"]
1446        else:
1447            error_msg = self._try_extract_error_code(res)
1448            raise BoostedAPIException("Failed to retrieve trading dates: {0}.".format(error_msg))
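
Usage sketch: omitting both dates defaults to the trailing 365 days ending today.

    trade_dates = client.getHistoricalTradeDates("my-portfolio-id")
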
1450    def getRankingsForAllDates(self, portfolio_id, dates=None):
1451        url = self.base_uri + "/api/portfolios/{0}/rankingsByDate".format(portfolio_id)
1452        headers = {"Authorization": "ApiKey " + self.api_key}
1453        params = {}
1454        if dates is not None:
1455            fmt_dates = []
1456            for d in dates:
1457                fmt_dates.append(self.__iso_format(d))
1458            fmt_dates_str = ",".join(fmt_dates)
1459            params = {"dates": fmt_dates_str}
1460            logger.info("Retrieving rankings information for date {0}.".format(fmt_dates_str))
1461        else:
1462            params = {"dates": None}
1463            logger.info("Retrieving rankings information for all dates")
1464        res = requests.get(url, params=params, headers=headers, **self._request_params)
1465        if res.ok:
1466            logger.info("Rankings retrieval successful.")
1467            return res.json()
1468        else:
1469            error_msg = self._try_extract_error_code(res)
1470            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1472    def getRankingsForDate(self, portfolio_id, date, rollback_to_last_available_date):
1473        date = self.__iso_format(date)
1474        endpoint = "latest-rankings" if rollback_to_last_available_date else "rankings"
1475        url = self.base_uri + "/api/{0}/{1}/{2}".format(endpoint, portfolio_id, date)
1476        headers = {"Authorization": "ApiKey " + self.api_key}
1477        logger.info("Retrieving rankings information for date {0}.".format(date))
1478        res = requests.get(url, headers=headers, **self._request_params)
1479        if res.ok:
1480            logger.info("Rankings retrieval successful.")
1481            return res.json()
1482        else:
1483            error_msg = self._try_extract_error_code(res)
1484            raise BoostedAPIException("Failed to retrieve rankings: {0}.".format(error_msg))
1486    def sendModelRecalc(self, model_id):
1487        url = self.base_uri + "/api/models/{0}/recalc".format(model_id)
1488        logger.info("Sending model recalc request for model {0}".format(model_id))
1489        headers = {"Authorization": "ApiKey " + self.api_key}
1490        res = requests.put(url, headers=headers, **self._request_params)
1491        if not res.ok:
1492            error_msg = self._try_extract_error_code(res)
1493            logger.error(error_msg)
1494            raise BoostedAPIException(
1495                "Failed to send model recalc request - "
1496                + "the model in UI may be out of date: {0}.".format(error_msg)
1497            )
1499    def sendRecalcAllModelPortfolios(self, model_id: str):
1500        """Recalculates all portfolios under a given model ID.
1501
1502        Args:
1503            model_id: the model ID
1504        Raises:
1505            BoostedAPIException: if the Boosted API request fails
1506        """
1507        url = self.base_uri + f"/api/models/{model_id}/recalc-all-portfolios"
1508        logger.info(f"Sending portfolio recalc requests for all portfolios under {model_id=}.")
1509        headers = {"Authorization": "ApiKey " + self.api_key}
1510        res = requests.put(url, headers=headers, **self._request_params)
1511        if not res.ok:
1512            error_msg = self._try_extract_error_code(res)
1513            logger.error(error_msg)
1514            raise BoostedAPIException(
1515                f"Failed to send recalc request for all portfolios under {model_id=} - {error_msg}."
1516            )

1518    def sendPortfolioRecalc(self, portfolio_id: str):
1519        """Recalculates a single portfolio by its portfolio ID.
1520
1521        Args:
1522            portfolio_id: the portfolio ID to recalculate
1523        Raises:
1524            BoostedAPIException: if the Boosted API request fails
1525        """
1526        url = self.base_uri + "/api/graphql"
1527        logger.info(f"Sending portfolio recalc request for {portfolio_id=}.")
1528        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1529        qry = """
1530            mutation recalcPortfolio($input: RecalculatePortfolioInput!) {
1531                recalculatePortfolio(input: $input) {
1532                    success
1533                    errors
1534                }
1535            }
1536            """
1537        req_json = {
1538            "query": qry,
1539            "variables": {"input": {"portfolioId": f"{portfolio_id}", "allowForceRecalc": "true"}},
1540        }
1541        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
1542        if not res.ok or res.json().get("errors"):
1543            error_msg = self._try_extract_error_code(res)
1544            logger.error(error_msg)
1545            raise BoostedAPIException(
1546                f"Failed to send portfolio recalc request for {portfolio_id=} - {error_msg}."
1547            )
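
Usage sketch of the recalculation helpers with placeholder IDs; which one to call depends on how much of the model you want recomputed.

    client.sendPortfolioRecalc("my-portfolio-id")         # a single portfolio
    client.sendRecalcAllModelPortfolios("my-model-id")    # every portfolio on a model
    client.sendModelRecalc("my-model-id")                 # the model itself
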

1549    def add_uploaded_model_data(self, url, csv_data, request_data, timeout=600):
1550        logger.info("Starting upload.")
1551        headers = {"Authorization": "ApiKey " + self.api_key}
1552        files_req: Dict = {}
1553        target: Tuple[str, Any, str] = ("data.csv", None, "text/csv")
1554        warnings = []
1555        if isinstance(csv_data, pd.core.frame.DataFrame):
1556            buf = io.StringIO()
1557            csv_data.to_csv(buf, header=False)
1558            if not isinstance(csv_data.index, pd.core.indexes.datetimes.DatetimeIndex):
1559                raise BoostedAPIException("DataFrame must have DatetimeIndex as index type.")
1560            target = ("uploaded_data.csv", buf.getvalue(), "text/csv")
1561            files_req["dataFile"] = target
1562            res = requests.post(
1563                url,
1564                files=files_req,
1565                data=request_data,
1566                headers=headers,
1567                timeout=timeout,
1568                **self._request_params,
1569            )
1570        elif isinstance(csv_data, str):
1571            target = ("uploaded_data.csv", csv_data, "text/csv")
1572            files_req["dataFile"] = target
1573            res = requests.post(
1574                url,
1575                files=files_req,
1576                data=request_data,
1577                headers=headers,
1578                timeout=timeout,
1579                **self._request_params,
1580            )
1581        else:
1582            raise BoostedAPIException("Expected CSV as str or Pandas DataFrame.")
1583        if res.ok:
1584            logger.info("Signals upload completed.")
1585            result = res.json()["result"]
1586            if "warningMessages" in result:
1587                warnings = result["warningMessages"]
1588        else:
1589            error_str = "Signals upload failed: {0}, {1}".format(res.text, res.reason)
1590            logger.error(error_str)
1591            raise BoostedAPIException(error_str)
1592
1593        return res, warnings
1595    def createSignalsModel(self, csv_data, model_name, timeout=600):
1596        warnings = []
1597        url = self.base_uri + "/api/models/upload/signals/create"
1598        request_data = {"modelName": model_name, "uploadName": model_name}
1599        res, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1600        result = res.json()["result"]
1601        model_id = result["modelId"]
1602        self.sendModelRecalc(model_id)
1603        return model_id, warnings
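
Usage sketch of createSignalsModel. Because add_uploaded_model_data serializes DataFrames without a header row, the column order shown here (date index, then isin, country, currency, weight) is an assumption modeled on what getSignalsFromUploadedModel returns; consult the signals upload documentation for the authoritative layout.

    import pandas as pd

    # Assumed column layout; a DatetimeIndex is required by add_uploaded_model_data.
    signals = pd.DataFrame(
        {
            "isin": ["US0378331005"],
            "country": ["US"],
            "currency": ["USD"],
            "weight": [1.0],
        },
        index=pd.to_datetime(["2023-01-03"]),
    )
    model_id, warnings = client.createSignalsModel(signals, "my-uploaded-model")
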
1605    def addToUploadedModel(self, model_id, csv_data, timeout=600, recalc_model=True):
1606        warnings = []
1607        url = self.base_uri + "/api/models/{0}/upload/add-data".format(model_id)
1608        request_data: Dict = {}
1609        _, warnings = self.add_uploaded_model_data(url, csv_data, request_data, timeout)
1610        if recalc_model:
1611            self.sendModelRecalc(model_id)
1612        return warnings
1614    def addSignalsToUploadedModel(
1615        self,
1616        model_id: str,
1617        csv_data: Union[pd.core.frame.DataFrame, str],
1618        timeout: int = 600,
1619        recalc_all: bool = False,
1620        recalc_portfolio_ids: Optional[List[str]] = None,
1621    ) -> List[str]:
1622        """
1623        Add signals to an uploaded model and then recalculate the model or selected portfolios under it.
1624
1625        Args:
1626            model_id: model ID
1627            csv_data: pandas DataFrame, or a string with signals to upload.
1628            timeout (optional): Timeout for initial upload request in seconds.
1629            recalc_all (optional): if True, recalculates all portfolios in the model.
1630            recalc_portfolio_ids (optional): List of portfolio IDs under the model to re-calculate.
1631        """
1632        warnings = self.addToUploadedModel(model_id, csv_data, timeout, recalc_model=False)
1633
1634        if recalc_all:
1635            self.sendRecalcAllModelPortfolios(model_id)
1636        elif recalc_portfolio_ids:
1637            for portfolio_id in recalc_portfolio_ids:
1638                self.sendPortfolioRecalc(portfolio_id)
1639        else:
1640            self.sendModelRecalc(model_id)
1641        return warnings
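
Usage sketch: append rows as a CSV string (placeholder data) and recalculate only the named portfolios.

    new_rows = "2023-01-04,US0378331005,US,USD,1.0\n"  # placeholder signal row
    warnings = client.addSignalsToUploadedModel(
        "my-model-id",
        new_rows,
        recalc_portfolio_ids=["my-portfolio-id"],
    )
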

1643    def getSignalsFromUploadedModel(self, model_id, date=None):
1644        date = self.__iso_format(date)
1645        url = self.base_uri + "/api/models/{0}/upload/signals".format(model_id)
1646        headers = {"Authorization": "ApiKey " + self.api_key}
1647        params = {"date": date}
1648        logger.info("Retrieving uploaded signals information")
1649        res = requests.get(url, params=params, headers=headers, **self._request_params)
1650        if res.ok:
1651            result = pd.DataFrame.from_dict(res.json()["result"])
1652            # ensure column order
1653            result = result[["date", "isin", "country", "currency", "weight"]]
1654            result["date"] = pd.to_datetime(result["date"], format="%Y-%m-%d")
1655            result = result.set_index("date")
1656            logger.info("Signals retrieval successful.")
1657            return result
1658        else:
1659            error_msg = self._try_extract_error_code(res)
1660            raise BoostedAPIException("Failed to retrieve signals: {0}.".format(error_msg))
1662    def getPortfolioSettings(self, portfolio_id, timeout=600):
1663        url = self.base_uri + "/api/portfolio-settings/{0}".format(portfolio_id)
1664        headers = {"Authorization": "ApiKey " + self.api_key}
1665        res = requests.get(url, headers=headers, **self._request_params)
1666        if res.ok:
1667            return PortfolioSettings(res.json())
1668        else:
1669            error_msg = self._try_extract_error_code(res)
1670            logger.error(error_msg)
1671            raise BoostedAPIException(
1672                "Failed to retrieve portfolio settings: {0}.".format(error_msg)
1673            )
1675    def createPortfolioWithPortfolioSettings(
1676        self, model_id, portfolio_name, portfolio_description, portfolio_settings, timeout=600
1677    ):
1678        url = self.base_uri + "/api/models/{0}/constraints/add".format(model_id)
1679        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1680        setting_string = json.dumps(portfolio_settings.settings)
1681        logger.info("Creating new portfolio with specified setting: {}".format(setting_string))
1682        params = {
1683            "name": portfolio_name,
1684            "description": portfolio_description,
1685            "settings": setting_string,
1686            "validate": "true",
1687        }
1688        res = requests.put(url, json=params, headers=headers, **self._request_params)
1689        response = res.json()
1690        if res.ok:
1691            return response
1692        else:
1693            error_msg = self._try_extract_error_code(res)
1694            logger.error(error_msg)
1695            raise BoostedAPIException(
1696                "Failed to create portfolio with the specified settings: {0}.".format(error_msg)
1697            )
1699    def getGbiIdFromIdentCountryCurrencyDate(
1700        self, ident_country_currency_dates: List[DateIdentCountryCurrency], timeout: int = 600
1701    ) -> List[Optional[GbiIdSecurity]]:
1702        url = self.base_uri + "/api/custom-stock-data/map-identifiers-simple"
1703        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1704        identifiers = [
1705            {
1706                "row": idx,
1707                "date": identifier.date,
1708                "isin": identifier.identifier if identifier.id_type == ColumnSubRole.ISIN else None,
1709                "symbol": (
1710                    identifier.identifier if identifier.id_type == ColumnSubRole.SYMBOL else None
1711                ),
1712                "countryPreference": identifier.country,
1713                "currencyPreference": identifier.currency,
1714            }
1715            for idx, identifier in enumerate(ident_country_currency_dates)
1716        ]
1717        params = json.dumps({"identifiers": identifiers})
1718        logger.info(
1719            "Retrieving GBI-ID mapping for {} identifier tuples...".format(
1720                len(ident_country_currency_dates)
1721            )
1722        )
1723        res = requests.post(url, data=params, headers=headers, **self._request_params)
1724
1725        if res.ok:
1726            result = res.json()
1727            warnings = result["warnings"]
1728            if warnings:
1729                for warning in warnings:
1730                    logger.warning(f"Mapping warning: {warning}")
1731            gbiSecurities = []
1732            for idx, ident in enumerate(result["mappedIdentifiers"]):
1733                if ident is None:
1734                    security = None
1735                else:
1736                    security = GbiIdSecurity(
1737                        ident["gbiId"],
1738                        ident_country_currency_dates[idx],
1739                        ident["symbol"],
1740                        ident["companyName"],
1741                    )
1742                gbiSecurities.append(security)
1743
1744            return gbiSecurities
1745        else:
1746            error_msg = self._try_extract_error_code(res)
1747            raise BoostedAPIException(
1748                "Failed to retrieve identifier mappings: {0}.".format(error_msg)
1749            )
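
Usage sketch of the identifier-mapping call. It assumes DateIdentCountryCurrency accepts these fields as keyword arguments (the method only reads its date, identifier, id_type, country and currency attributes); unmappable identifiers come back as None in the result list.

    from boosted.api.api_type import ColumnSubRole, DateIdentCountryCurrency

    # Constructor keywords are an assumption; the values are placeholders.
    query = DateIdentCountryCurrency(
        date="2023-01-03",
        identifier="US0378331005",
        country="US",
        currency="USD",
        id_type=ColumnSubRole.ISIN,
    )
    securities = client.getGbiIdFromIdentCountryCurrencyDate([query])
    for security in securities:
        print(security)  # GbiIdSecurity, or None if the identifier did not map
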
1752    def getGbiIdFromIsinCountryCurrencyDate(self, isin_country_currency_dates, timeout=600):
1753        return self.getGbiIdFromIdentCountryCurrencyDate(
1754            ident_country_currency_dates=isin_country_currency_dates, timeout=timeout
1755        )
1784    def getDatasetDates(self, dataset_id):
1785        url = self.base_uri + f"/api/datasets/{dataset_id}"
1786        headers = {"Authorization": "ApiKey " + self.api_key}
1787        res = requests.get(url, headers=headers, **self._request_params)
1788        if res.ok:
1789            dataset = res.json()
1790            valid_to_array = dataset.get("validTo")
1791            valid_to_date = None
1792            valid_from_array = dataset.get("validFrom")
1793            valid_from_date = None
1794            if valid_to_array:
1795                valid_to_date = datetime.date(
1796                    valid_to_array[0], valid_to_array[1], valid_to_array[2]
1797                )
1798            if valid_from_array:
1799                valid_from_date = datetime.date(
1800                    valid_from_array[0], valid_from_array[1], valid_from_array[2]
1801                )
1802            return {"validTo": valid_to_date, "validFrom": valid_from_date}
1803        else:
1804            error_msg = self._try_extract_error_code(res)
1805            logger.error(error_msg)
1806            raise BoostedAPIException("Failed to query dataset: {0}.".format(error_msg))
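
Usage sketch: the returned bounds are datetime.date objects, or None when the server omits them.

    bounds = client.getDatasetDates("my-dataset-id")
    print(bounds["validFrom"], bounds["validTo"])
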
1808    def getRankingAnalysis(self, model_id, date):
1809        url = (
1810            self.base_uri
1811            + f"/api/explain-trades/analysis/{model_id}/{self.__iso_format(date)}/json"
1812        )
1813        headers = {"Authorization": "ApiKey " + self.api_key}
1814        analysis_res = requests.get(url, headers=headers, **self._request_params)
1815        if analysis_res.ok:
1816            ranking_dict = analysis_res.json()
1817            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1818            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1819
1820            df = protoCubeJsonDataToDataFrame(
1821                ranking_dict["data"],
1822                "Data Buckets",
1823                ranking_dict["rows"],
1824                "Feature Names",
1825                columns,
1826                ranking_dict["fields"],
1827            )
1828            return df
1829        else:
1830            error_msg = self._try_extract_error_code(analysis_res)
1831            logger.error(error_msg)
1832            raise BoostedAPIException("Failed to get ranking analysis: {0}.".format(error_msg))
1834    def getExplainForPortfolio(
1835        self,
1836        model_id,
1837        portfolio_id,
1838        date,
1839        index_by_symbol: bool = False,
1840        index_by_all_metadata: bool = False,
1841    ):
1842        """
1843        Gets the ranking 2.0 explain data for the given model on the given date
1844        filtered by portfolio.
1845
1846        Parameters
1847        ----------
1848        model_id: str
1849            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1850            button next to your model's name in the Model Summary Page in Boosted
1851            Insights.
1852        portfolio_id: str
1853            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
1854        date: datetime.date or YYYY-MM-DD string
1855            Date of the data to retrieve.
1856        index_by_symbol: bool
1857            If true, index by stock symbol instead of ISIN.
1858        index_by_all_metadata: bool
1859            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1860            Overrides index_by_symbol.
1861
1862        Returns
1863        -------
1864        pandas.DataFrame
1865            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1866            and feature names, filtered by portfolio.
1867        ___
1868        """
1869        indices = ["Symbol", "ISINs", "Country", "Currency"]
1870        raw_explain_df = self.getRankingExplain(
1871            model_id, date, index_by_symbol=False, index_by_all_metadata=True
1872        )
1873        pa_ratings_dict = self.getRankingsForDate(portfolio_id, date, False)
1874
1875        ratings = pa_ratings_dict["rankings"]
1876        ratings_df = pd.DataFrame(ratings)
1877        ratings_df = ratings_df[["symbol", "isin", "country", "currency"]]
1878        ratings_df.columns = pd.Index(indices)
1879        ratings_df.set_index(indices, inplace=True)
1880
1881        # inner join to only get the securities in both data frames
1882        result_df = raw_explain_df.merge(ratings_df, left_index=True, right_index=True, how="inner")
1883
1884        # set index based on input parameters
1885        if index_by_symbol and not index_by_all_metadata:
1886            result_df = result_df.reset_index()
1887            result_df = result_df.drop(columns=["ISINs", "Currency", "Country"])
1888            result_df.set_index(["Symbol", "Feature Names"], inplace=True)
1889        elif not index_by_symbol and not index_by_all_metadata:
1890            result_df = result_df.reset_index()
1891            result_df = result_df.drop(columns=["Symbol", "Currency", "Country"])
1892            result_df.set_index(["ISINs", "Feature Names"], inplace=True)
1893
1894        return result_df
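
Usage sketch with placeholder IDs, indexing the portfolio-filtered explain data by symbol and feature name.

    explain_df = client.getExplainForPortfolio(
        "my-model-id",
        "my-portfolio-id",
        "2023-06-30",
        index_by_symbol=True,
    )
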

1896    def getRankingExplain(
1897        self, model_id, date, index_by_symbol: bool = False, index_by_all_metadata: bool = False
1898    ):
1899        """
1900        Gets the ranking 2.0 explain data for the given model on the given date
1901
1902        Parameters
1903        ----------
1904        model_id: str
1905            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
1906            button next to your model's name in the Model Summary Page in Boosted
1907            Insights.
1908        date: datetime.date or YYYY-MM-DD string
1909            Date of the data to retrieve.
1910        index_by_symbol: bool
1911            If true, index by stock symbol instead of ISIN.
1912        index_by_all_metadata: bool
1913            If true, index by all metadata: ISIN, stock symbol, currency, and country.
1914            Overrides index_by_symbol.
1915
1916        Returns
1917        -------
1918        pandas.DataFrame
1919            Pandas DataFrame containing your data indexed by ISINs/Symbol/all metadata
1920            and feature names.
1921        ___
1922        """
1923        url = (
1924            self.base_uri + f"/api/explain-trades/explain/{model_id}/{self.__iso_format(date)}/json"
1925        )
1926        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
1927        explain_res = requests.get(url, headers=headers, **self._request_params)
1928        if explain_res.ok:
1929            ranking_dict = explain_res.json()
1930            rows = ranking_dict["rows"]
1931            stock_summary_url = f"/api/stock-summaries/{model_id}"
1932            stock_summary_body = {"gbiIds": ranking_dict["rows"]}
1933            summary_res = requests.post(
1934                self.base_uri + stock_summary_url,
1935                data=json.dumps(stock_summary_body),
1936                headers=headers,
1937                **self._request_params,
1938            )
1939            if summary_res.ok:
1940                stock_summary = summary_res.json()
1941                if index_by_symbol:
1942                    rows = [stock_summary[row]["symbol"] for row in ranking_dict["rows"]]
1943                elif index_by_all_metadata:
1944                    rows = [
1945                        [
1946                            stock_summary[row]["isin"],
1947                            stock_summary[row]["symbol"],
1948                            stock_summary[row]["currency"],
1949                            stock_summary[row]["country"],
1950                        ]
1951                        for row in ranking_dict["rows"]
1952                    ]
1953                else:
1954                    rows = [stock_summary[row]["isin"] for row in ranking_dict["rows"]]
1955            else:
1956                error_msg = self._try_extract_error_code(summary_res)
1957                logger.error(error_msg)
1958                raise BoostedAPIException(
1959                    "Failed to get isin information ranking explain: {0}.".format(error_msg)
1960                )
1961
1962            feature_name_dict = self.__get_rankings_ref_translation(model_id)
1963            columns = [feature_name_dict[col] for col in ranking_dict["columns"]]
1964
1965            id_col_name = "Symbols" if index_by_symbol else "ISINs"
1966
1967            if index_by_all_metadata:
1968                pc_list = []
1969                pf = ranking_dict["data"]
1970                for row_idx, row in enumerate(rows):
1971                    for col_idx, col in enumerate(columns):
1972                        pc_list.append([row, col] + pf[row_idx]["columns"][col_idx]["fields"])
1973                df = pd.DataFrame(pc_list)
1974                df = df.set_axis(
1975                    ["Metadata", "Feature Names"] + ranking_dict["fields"], axis="columns"
1976                )
1977
1978                metadata_df = df["Metadata"].apply(pd.Series)
1979                metadata_df.columns = pd.Index(["ISINs", "Symbol", "Currency", "Country"])
1980                result_df = pd.concat([metadata_df, df], axis=1).drop("Metadata", axis=1)
1981                result_df.set_index(
1982                    ["ISINs", "Symbol", "Currency", "Country", "Feature Names"], inplace=True
1983                )
1984                return result_df
1985
1986            else:
1987                df = protoCubeJsonDataToDataFrame(
1988                    ranking_dict["data"],
1989                    id_col_name,
1990                    rows,
1991                    "Feature Names",
1992                    columns,
1993                    ranking_dict["fields"],
1994                )
1995
1996                return df
1997        else:
1998            error_msg = self._try_extract_error_code(explain_res)
1999            logger.error(error_msg)
2000            raise BoostedAPIException("Failed to get ranking explain: {0}.".format(error_msg))
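
Usage sketch: with index_by_all_metadata=True the resulting frame is indexed by ISIN, symbol, currency, country and feature name.

    explain_df = client.getRankingExplain(
        "my-model-id", "2023-06-30", index_by_all_metadata=True
    )
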

2002    def getDenseSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date):
2003        date = self.__iso_format(date)
2004        url = self.base_uri + f"/api/portfolios/{portfolio_id}/denseSignalsByDate"
2005        headers = {"Authorization": "ApiKey " + self.api_key}
2006        params = {
2007            "startDate": date,
2008            "endDate": date,
2009            "rollbackToMostRecentDate": rollback_to_last_available_date,
2010        }
2011        logger.info("Retrieving dense signals information for date {0}.".format(date))
2012        res = requests.get(url, params=params, headers=headers, **self._request_params)
2013        if res.ok:
2014            logger.info("Signals retrieval successful.")
2015            d = res.json()
2016            # reshape date to output format
2017            date = list(d["signals"].keys())[0]
2018            model_id = d["model_id"]
2019            signals_list = list(d["signals"].values())[0]
2020            return {"date": date, "signals": [{"model_id": model_id, "signals_info": signals_list}]}
2021        else:
2022            error_msg = self._try_extract_error_code(res)
2023            raise BoostedAPIException("Failed to retrieve dense signals: {0}.".format(error_msg))
2025    def getDenseSignals(self, model_id, portfolio_id, file_name=None, location="./"):
2026        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/dense-signals"
2027        headers = {"Authorization": "ApiKey " + self.api_key}
2028        res = requests.get(url, headers=headers, **self._request_params)
2029        if file_name is None:
2030            file_name = f"{model_id}-{portfolio_id}_dense_signals.csv"
2031        download_location = os.path.join(location, file_name)
2032        if res.ok:
2033            with open(download_location, "wb") as file:
2034                file.write(res.content)
2035            print("Download Complete")
2036        elif res.status_code == 404:
2037            raise BoostedAPIException(
2038                f"""Dense Signals file does not exist for model:
2039                 {model_id} - portfolio: {portfolio_id}"""
2040            )
2041        else:
2042            error_msg = self._try_extract_error_code(res)
2043            logger.error(error_msg)
2044            raise BoostedAPIException(
2045                f"""Failed to download dense signals file for model:
2046                 {model_id} - portfolio: {portfolio_id}"""
2047            )
2093    def getRanking2DateAnalysisFile(
2094        self, model_id, portfolio_id, date, file_name=None, location="./"
2095    ):
2096        formatted_date = self.__iso_format(date)
2097        s3_file_name = f"{formatted_date}_analysis.xlsx"
2098        download_url = (
2099            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2100        )
2101        headers = {"Authorization": "ApiKey " + self.api_key}
2102        if file_name is None:
2103            file_name = f"{model_id}-{portfolio_id}_statistical_analysis_{formatted_date}.xlsx"
2104        download_location = os.path.join(location, file_name)
2105
2106        res = requests.get(download_url, headers=headers, **self._request_params)
2107        if res.ok:
2108            with open(download_location, "wb") as file:
2109                file.write(res.content)
2110            print("Download Complete")
2111        elif res.status_code == 404:
2112            (
2113                is_portfolio_ready_for_processing,
2114                portfolio_ready_status,
2115            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2116
2117            if not is_portfolio_ready_for_processing:
2118                logger.info(
2119                    f"""\nPortfolio {portfolio_id} for model {model_id}
2120                    on date {date} unavailable for Ranking2Date Analysis file.
2121                    Status: {portfolio_ready_status}\n"""
2122                )
2123                return
2124
2125            generate_url = (
2126                self.base_uri
2127                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2128                + f"/generate/date-data/{formatted_date}"
2129            )
2130
2131            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2132            if generate_res.ok:
2133                download_res = requests.get(download_url, headers=headers, **self._request_params)
2134                while download_res.status_code == 404 or (
2135                    download_res.ok and len(download_res.content) == 0
2136                ):
2137                    print("waiting for file to be generated")
2138                    time.sleep(5)
2139                    download_res = requests.get(
2140                        download_url, headers=headers, **self._request_params
2141                    )
2142                if download_res.ok:
2143                    with open(download_location, "wb") as file:
2144                        file.write(download_res.content)
2145                    print("Download Complete")
2146            else:
2147                error_msg = self._try_extract_error_code(generate_res)
2148                logger.error(error_msg)
2149                raise BoostedAPIException(
2150                    f"""Failed to generate ranking analysis file for model:
2151                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2152                )
2153        else:
2154            error_msg = self._try_extract_error_code(res)
2155            logger.error(error_msg)
2156            raise BoostedAPIException(
2157                f"""Failed to download ranking analysis file for model:
2158                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2159            )
2161    def getRanking2DateExplainFile(
2162        self,
2163        model_id,
2164        portfolio_id,
2165        date,
2166        file_name=None,
2167        location="./",
2168        overwrite: bool = False,
2169        index_by_all_metadata: bool = False,
2170    ):
2171        """
2172        Downloads the ranking explain file for the provided portfolio and model.
2173        If no file exists then it will send a request to generate the file and continuously
2174        poll the server every 5 seconds to try and download the file until the file is downloaded.
2175
2176        Parameters
2177        ----------
2178        model_id: str
2179            Model ID.  Model IDs can be retrieved by clicking on the copy to clipboard
2180            button next to your model's name in the Model Summary Page in Boosted
2181            Insights.
2182        portfolio_id: str
2183            Portfolio ID.  Portfolio IDs can be retrieved from portfolio's configuration page.
2184        date: datetime.date or YYYY-MM-DD string
2185            Date of the data to retrieve.
2186        file_name: str
2187            File name of the dense signals file to save as.
2188            If no file name is given the file name will be
2189            "<model_id>-<portfolio_id>_explain_data_<date>.xlsx"
2190        location: str
2191            The location to save the file to.
2192            If no location is given then it will be saved to the current directory.
2193        overwrite: bool
2194            Defaults to False, set to True to regenerate the file.
2195        index_by_all_metadata: bool
2196            If true, index by all metadata: ISIN, stock symbol, currency, and country.
2197
2198
2199        Returns
2200        -------
2201        None
2202        ___
2203        """
2204        formatted_date = self.__iso_format(date)
2205        if index_by_all_metadata:
2206            s3_file_name = f"{formatted_date}_explaindata_withmetadata.xlsx"
2207        else:
2208            s3_file_name = f"{formatted_date}_explaindata.xlsx"
2209        download_url = (
2210            self.base_uri + f"/api/models/{model_id}/{portfolio_id}/ranking-file/{s3_file_name}"
2211        )
2212        headers = {"Authorization": "ApiKey " + self.api_key}
2213        if file_name is None:
2214            file_name = f"{model_id}-{portfolio_id}_explain_data_{formatted_date}.xlsx"
2215        download_location = os.path.join(location, file_name)
2216
2217        if not overwrite:
2218            res = requests.get(download_url, headers=headers, **self._request_params)
2219        if not overwrite and res.ok:
2220            with open(download_location, "wb") as file:
2221                file.write(res.content)
2222            print("Download Complete")
2223        elif overwrite or res.status_code == 404:
2224            (
2225                is_portfolio_ready_for_processing,
2226                portfolio_ready_status,
2227            ) = self._getIsPortfolioReadyForProcessing(model_id, portfolio_id, formatted_date)
2228
2229            if not is_portfolio_ready_for_processing:
2230                logger.info(
2231                    f"""\nPortfolio {portfolio_id} for model {model_id}
2232                    on date {date} unavailable for Ranking2Date Explain file.
2233                    Status: {portfolio_ready_status}\n"""
2234                )
2235                return
2236
2237            generate_url = (
2238                self.base_uri
2239                + f"/api/explain-trades/{model_id}/{portfolio_id}"
2240                + f"/generate/date-data/{formatted_date}"
2241                + f"/{'true' if index_by_all_metadata else 'false'}"
2242            )
2243
2244            generate_res = requests.get(generate_url, headers=headers, **self._request_params)
2245            if generate_res.ok:
2246                download_res = requests.get(download_url, headers=headers, **self._request_params)
2247                while download_res.status_code == 404 or (
2248                    download_res.ok and len(download_res.content) == 0
2249                ):
2250                    print("waiting for file to be generated")
2251                    time.sleep(5)
2252                    download_res = requests.get(
2253                        download_url, headers=headers, **self._request_params
2254                    )
2255                if download_res.ok:
2256                    with open(download_location, "wb") as file:
2257                        file.write(download_res.content)
2258                    print("Download Complete")
2259            else:
2260                error_msg = self._try_extract_error_code(generate_res)
2261                logger.error(error_msg)
2262                raise BoostedAPIException(
2263                    f"""Failed to generate ranking explain file for model:
2264                    {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2265                )
2266        else:
2267            error_msg = self._try_extract_error_code(res)
2268            logger.error(error_msg)
2269            raise BoostedAPIException(
2270                f"""Failed to download ranking explain file for model:
2271                 {model_id} - portfolio: {portfolio_id} on date: {formatted_date}"""
2272            )

2274    def getRanking2DateExplain(
2275        self,
2276        model_id: str,
2277        portfolio_id: str,
2278        date: Optional[datetime.date],
2279        overwrite: bool = False,
2280    ) -> Dict[str, pd.DataFrame]:
2281        """
2282        Wrapper around getRanking2DateExplainFile, but returns a pandas
2283        dataframe instead of downloading to a path. Dataframe is indexed by
2284        symbol and should always have 'rating' and 'rating_delta' columns. Other
2285        columns will be determined by model's features.
2286        """
2287        file_name = "explaindata.xlsx"
2288        with tempfile.TemporaryDirectory() as tmpdirname:
2289            self.getRanking2DateExplainFile(
2290                model_id=model_id,
2291                portfolio_id=portfolio_id,
2292                date=date,
2293                file_name=file_name,
2294                location=tmpdirname,
2295                overwrite=overwrite,
2296            )
2297            full_path = os.path.join(tmpdirname, file_name)
2298            excel_file = pd.ExcelFile(full_path)
2299            df_map = pd.read_excel(excel_file, sheet_name=None)
2300            df_map_final = {str(sheet): df.set_index("Symbol") for (sheet, df) in df_map.items()}
2301
2302        return df_map_final
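
Usage sketch: each sheet of the underlying Excel file becomes a DataFrame indexed by Symbol, with at least rating and rating_delta columns.

    import datetime

    sheets = client.getRanking2DateExplain(
        "my-model-id", "my-portfolio-id", datetime.date(2023, 6, 30)
    )
    for name, df in sheets.items():
        print(name, df[["rating", "rating_delta"]].head())
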

2304    def getTearSheet(self, model_id, portfolio_id, start_date=None, end_date=None, block=False):
2305        if start_date is None or end_date is None:
2306            if start_date is not None or end_date is not None:
2307                raise ValueError("start_date and end_date must both be None or both be defined")
2308            return self._getCurrentTearSheet(model_id, portfolio_id)
2309
2310        start_date_obj = self.__to_date_obj(start_date)
2311        end_date_obj = self.__to_date_obj(end_date)
2312        if start_date_obj >= end_date_obj:
2313            raise ValueError("end_date must be later than the start_date")
2314
2315        # get for the given date
2316        url = self.base_uri + f"/api/analysis/keyfacts/{model_id}/{portfolio_id}"
2317        data = {
2318            "startDate": self.__iso_format(start_date),
2319            "endDate": self.__iso_format(end_date),
2320            "shouldRecalc": True,
2321        }
2322        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2323        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2324        if res.status_code == 404 and block:
2325            retries = 0
2326            data["shouldRecalc"] = False
2327            while retries < 10:
2328                time.sleep(10)
2329                retries += 1
2330                res = requests.post(
2331                    url, data=json.dumps(data), headers=headers, **self._request_params
2332                )
2333                if res.status_code != 404:
2334                    break
2335        if res.ok:
2336            return res.json()
2337        else:
2338            error_msg = self._try_extract_error_code(res)
2339            logger.error(error_msg)
2340            raise BoostedAPIException(
2341                "Failed to get tear sheet data: {0} {1}.".format(error_msg, str(res.status_code))
2342            )
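
Usage sketch for an explicit date range; block=True keeps retrying while the server computes the key-facts data.

    tear_sheet = client.getTearSheet(
        "my-model-id",
        "my-portfolio-id",
        start_date="2022-01-01",
        end_date="2022-12-31",
        block=True,
    )
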
2356    def getPortfolioStatus(self, model_id, portfolio_id, job_date):
2357        url = (
2358            self.base_uri
2359            + f"/api/analysis/portfolioStatus/{model_id}/{portfolio_id}?jobDate={job_date}"
2360        )
2361        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2362        res = requests.get(url, headers=headers, **self._request_params)
2363        if res.ok:
2364            result = res.json()
2365            return {
2366                "is_complete": result["status"],
2367                "last_update": None if result["lastUpdate"] is None else result["lastUpdate"][:10],
2368                "next_update": None if result["nextUpdate"] is None else result["nextUpdate"][:10],
2369            }
2370        else:
2371            error_msg = self._try_extract_error_code(res)
2372            logger.error(error_msg)
2373            raise BoostedAPIException("Failed to get portfolio status: {0}".format(error_msg))
2392    def get_portfolio_factor_attribution(
2393        self,
2394        portfolio_id: str,
2395        start_date: Optional[BoostedDate] = None,
2396        end_date: Optional[BoostedDate] = None,
2397    ):
2398        """Get portfolio factor attribution for a portfolio
2399
2400        Args:
2401            portfolio_id (str): a valid UUID string
2402            start_date (BoostedDate, optional): The start date. Defaults to None.
2403            end_date (BoostedDate, optional): The end date. Defaults to None.
2404        """
2405        response = self._query_portfolio_factor_attribution(portfolio_id, start_date, end_date)
2406        factor_attribution = response["data"]["portfolio"]["factorAttribution"]
2407        dates = pd.DatetimeIndex(data=factor_attribution["dates"])
2408        beta = factor_attribution["factorBetas"]
2409        beta_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in beta})
2410        beta_df = beta_df.add_suffix("_beta")
2411        returns = factor_attribution["portfolioFactorPerformance"]
2412        returns_df = pd.DataFrame(index=dates, data={x["name"]: x["data"] for x in returns})
2413        returns_df = returns_df.add_suffix("_return")
2414        returns_df = (returns_df - 1) * 100
2415
2416        final_df = pd.concat([returns_df, beta_df], axis=1)
2417        ordered_columns = list(itertools.chain(*zip(returns_df.columns, beta_df.columns)))
2418        ordered_final_df = final_df.reindex(columns=ordered_columns)
2419
2420        # Add the column `total_return` which is the sum of returns_data
2421        ordered_final_df["total_return"] = returns_df.sum(axis=1)
2422        return ordered_final_df
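
A sketch of reading the result; the portfolio UUID is a placeholder:

    attribution = client.get_portfolio_factor_attribution(
        "portfolio-uuid", start_date="2023-01-01", end_date="2023-06-30"
    )
    # Columns alternate "<factor>_return" and "<factor>_beta", plus a final
    # "total_return" column summing the per-factor returns:
    print(attribution.filter(like="_beta").tail())
    print(attribution["total_return"].tail())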

def getBlacklist(self, blacklist_id):
2424    def getBlacklist(self, blacklist_id):
2425        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2426        headers = {"Authorization": "ApiKey " + self.api_key}
2427        res = requests.get(url, headers=headers, **self._request_params)
2428        if res.ok:
2429            result = res.json()
2430            return result
2431        error_msg = self._try_extract_error_code(res)
2432        logger.error(error_msg)
2433        raise BoostedAPIException(f"Failed to get blacklist with id {blacklist_id}: {error_msg}")
def getBlacklists(self, model_id=None, company_id=None, last_N=None):
2435    def getBlacklists(self, model_id=None, company_id=None, last_N=None):
2436        params = {}
2437        if last_N:
2438            params["lastN"] = last_N
2439        if model_id:
2440            params["modelId"] = model_id
2441        if company_id:
2442            params["companyId"] = company_id
2443        url = self.base_uri + "/api/blacklist"
2444        headers = {"Authorization": "ApiKey " + self.api_key}
2445        res = requests.get(url, headers=headers, params=params, **self._request_params)
2446        if res.ok:
2447            result = res.json()
2448            return result
2449        error_msg = self._try_extract_error_code(res)
2450        logger.error(error_msg)
2451        raise BoostedAPIException(
2452            f"""Failed to get blacklists with \
2453            model_id {model_id} company_id {company_id} last_N {last_N}: {error_msg}"""
2454        )
def createBlacklist(self, isin, long_short=2, start_date=datetime.date.today(), end_date='4000-01-01', model_id=None):
2456    def createBlacklist(
2457        self,
2458        isin,
2459        long_short=2,
2460        start_date=datetime.date.today(),
2461        end_date="4000-01-01",
2462        model_id=None,
2463    ):
2464        url = self.base_uri + "/api/blacklist"
2465        data = {
2466            "modelId": model_id,
2467            "isin": isin,
2468            "longShort": long_short,
2469            "startDate": self.__iso_format(start_date),
2470            "endDate": self.__iso_format(end_date),
2471        }
2472        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2473        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2474        if res.ok:
2475            return res.json()
2476        else:
2477            error_msg = self._try_extract_error_code(res)
2478            logger.error(error_msg)
2479            raise BoostedAPIException(
2480                f"""Failed to create the blacklist with \
2481                  isin {isin} long_short {long_short} start_date {start_date} end_date {end_date} \
2482                  model_id {model_id}: {error_msg}."""
2483            )
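
A usage sketch; the ISIN and model UUID are placeholders, and long_short=2 simply mirrors the default in the signature above:

    client.createBlacklist(
        isin="US0000000000",
        long_short=2,
        start_date="2023-01-01",
        end_date="4000-01-01",
        model_id="model-uuid",  # or None
    )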
def createBlacklistsFromCSV(self, csv_name):
2485    def createBlacklistsFromCSV(self, csv_name):
2486        url = self.base_uri + "/api/blacklists"
2487        data = []
2488        with open(csv_name, mode="r") as f:
2489            csv_reader = csv.DictReader(f)
2490            for row in csv_reader:
2491                blacklist = {"modelId": row["ModelID"], "isin": row["ISIN"]}
2492                if not row.get("LongShort"):
2493                    blacklist["longShort"] = 2
2494                else:
2495                    blacklist["longShort"] = row["LongShort"]
2496
2497                if not row.get("StartDate"):
2498                    blacklist["startDate"] = self.__iso_format(datetime.date.today())
2499                else:
2500                    blacklist["startDate"] = self.__iso_format(row["StartDate"])
2501
2502                if not row.get("EndDate"):
2503                    blacklist["endDate"] = self.__iso_format("4000-01-01")
2504                else:
2505                    blacklist["endDate"] = self.__iso_format(row["EndDate"])
2506                data.append(blacklist)
2507        print(f"Processed {len(data)} blacklists.")
2508        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2509        res = requests.post(url, data=json.dumps(data), headers=headers, **self._request_params)
2510        if res.ok:
2511            return res.json()
2512        else:
2513            error_msg = self._try_extract_error_code(res)
2514            logger.error(error_msg)
2515            raise BoostedAPIException(f"Failed to create blacklists: {error_msg}")
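
The expected CSV layout, reconstructed from the parsing code above: ModelID and ISIN are read unconditionally, while LongShort, StartDate, and EndDate fall back to 2, today, and 4000-01-01 when blank. A hypothetical file and call:

    ModelID,ISIN,LongShort,StartDate,EndDate
    model-uuid-1,US0000000000,2,2023-01-01,4000-01-01
    model-uuid-2,US0000000001,,,

    client.createBlacklistsFromCSV("blacklists.csv")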
def updateBlacklist(self, blacklist_id, long_short=None, start_date=None, end_date=None):
2517    def updateBlacklist(self, blacklist_id, long_short=None, start_date=None, end_date=None):
2518        params = {}
2519        if long_short:
2520            params["longShort"] = long_short
2521        if start_date:
2522            params["startDate"] = start_date
2523        if end_date:
2524            params["endDate"] = end_date
2525        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2526        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2527        res = requests.patch(url, json=params, headers=headers, **self._request_params)
2528        if res.ok:
2529            return res.json()
2530        else:
2531            error_msg = self._try_extract_error_code(res)
2532            logger.error(error_msg)
2533            raise BoostedAPIException(
2534                f"Failed to update blacklist with id {blacklist_id}: {error_msg}"
2535            )
def deleteBlacklist(self, blacklist_id):
2537    def deleteBlacklist(self, blacklist_id):
2538        url = self.base_uri + f"/api/blacklist/{blacklist_id}"
2539        headers = {"Authorization": "ApiKey " + self.api_key}
2540        res = requests.delete(url, headers=headers, **self._request_params)
2541        if res.ok:
2542            result = res.json()
2543            return result
2544        else:
2545            error_msg = self._try_extract_error_code(res)
2546            logger.error(error_msg)
2547            raise BoostedAPIException(
2548                f"Failed to delete blacklist with id {blacklist_id}: {error_msg}"
2549            )
def getFeatureImportance(self, model_id, date, N=None):
2551    def getFeatureImportance(self, model_id, date, N=None):
2552        url = self.base_uri + f"/api/analysis/explainability/{model_id}"
2553        headers = {"Authorization": "ApiKey " + self.api_key}
2554        logger.info("Retrieving rankings information for date {0}.".format(date))
2555        res = requests.get(url, headers=headers, **self._request_params)
2556        if not res.ok:
2557            error_msg = self._try_extract_error_code(res)
2558            logger.error(error_msg)
2559            raise BoostedAPIException(
2560                f"Failed to fetch feature importance for model/portfolio {model_id}: {error_msg}"
2561            )
2562
2563        json_data = res.json()
2564        if "all" not in json_data.keys() or not json_data["all"]:
2565            raise BoostedAPIException("Unexpected formatting of feature importance response")
2566
2567        feature_data = json_data["all"]
2568        # find the right period (assuming returned json has dates in descending order)
2569        date_obj = self.__to_date_obj(date)
2570        start_date_for_return_data = self.__to_date_obj(feature_data[0]["date"])
2571        features_for_requested_period = None
2572
2573        if date_obj > start_date_for_return_data:
2574            features_for_requested_period = feature_data[0]["variable"]
2575        else:
2576            i = 0
2577            while i < len(feature_data) - 1:
2578                current_date = self.__to_date_obj(feature_data[i]["date"])
2579                next_date = self.__to_date_obj(feature_data[i + 1]["date"])
2580                if next_date <= date_obj <= current_date:
2581                    features_for_requested_period = feature_data[i + 1]["variable"]
2582                    start_date_for_return_data = next_date
2583                    break
2584                i += 1
2585
2586        if features_for_requested_period is None:
2587            raise BoostedAPIException(f"No feature data was found for requested date: {date_obj}")
2588
2589        features_for_requested_period.sort(key=lambda x: x["value"], reverse=True)
2590
2591        if type(N) is int and N > 0:
2592            df = pd.DataFrame.from_dict(features_for_requested_period[0:N])
2593        else:
2594            df = pd.DataFrame.from_dict(features_for_requested_period)
2595        result = df[["feature", "value"]]
2596
2597        return result.rename(columns={"feature": f"feature ({start_date_for_return_data})"})
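
A usage sketch; note that the feature column is renamed to embed the start date of the period actually matched:

    top_features = client.getFeatureImportance("model-uuid", "2023-06-30", N=10)
    # Columns: "feature (<period start date>)" and "value", sorted descending.
    print(top_features)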
def getAllModelNames(self) -> Dict[str, str]:
2599    def getAllModelNames(self) -> Dict[str, str]:
2600        url = f"{self.base_uri}/api/graphql"
2601        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2602        req_json = {"query": "query listOfModels {\n models { id name }}", "variables": {}}
2603        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2604        if not res.ok:
2605            error_msg = self._try_extract_error_code(res)
2606            logger.error(error_msg)
2607            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2608        data = res.json()
2609        if data["data"]["models"] is None:
2610            return {}
2611        return {rec["id"]: rec["name"] for rec in data["data"]["models"]}
def getAllModelDetails(self) -> Dict[str, Dict[str, Any]]:
2613    def getAllModelDetails(self) -> Dict[str, Dict[str, Any]]:
2614        url = f"{self.base_uri}/api/graphql"
2615        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
2616        req_json = {
2617            "query": "query listOfModels {\n models { id name lastUpdated portfolios { id name }}}",
2618            "variables": {},
2619        }
2620        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
2621        if not res.ok:
2622            error_msg = self._try_extract_error_code(res)
2623            logger.error(error_msg)
2624            raise BoostedAPIException(f"Failed to get user models: {error_msg}")
2625        data = res.json()
2626        if data["data"]["models"] is None:
2627            return {}
2628
2629        output_data = {}
2630        for rec in data["data"]["models"]:
2631            model_id = rec["id"]
2632            output_data[model_id] = {
2633                "name": rec["name"],
2634                "last_updated": parser.parse(rec["lastUpdated"]),
2635                "portfolios": rec["portfolios"],
2636            }
2637
2638        return output_data
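
A sketch of iterating the result, which maps each model id to its name, last-updated timestamp (a parsed datetime), and portfolio list:

    for model_id, info in client.getAllModelDetails().items():
        print(model_id, info["name"], info["last_updated"].date())
        for pf in info["portfolios"]:
            print("   ", pf["id"], pf["name"])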
def get_hedge_experiments(self):
2640    def get_hedge_experiments(self):
2641        url = self.base_uri + "/api/graphql"
2642        qry = """
2643            query getHedgeExperiments {
2644                hedgeExperiments {
2645                    hedgeExperimentId
2646                    experimentName
2647                    userId
2648                    config
2649                    description
2650                    experimentType
2651                    lastCalculated
2652                    lastModified
2653                    status
2654                    portfolioCalcStatus
2655                    targetSecurities {
2656                        gbiId
2657                        security {
2658                            gbiId
2659                            symbol
2660                            name
2661                        }
2662                        weight
2663                    }
2664                    targetPortfolios {
2665                        portfolioId
2666                    }
2667                    baselineModel {
2668                        id
2669                        name
2670
2671                    }
2672                    baselineScenario {
2673                        hedgeExperimentScenarioId
2674                        scenarioName
2675                        description
2676                        portfolioSettingsJson
2677                        hedgeExperimentPortfolios {
2678                            portfolio {
2679                                id
2680                                name
2681                                modelId
2682                                performanceGridHeader
2683                                performanceGrid
2684                                status
2685                                tearSheet {
2686                                    groupName
2687                                    members {
2688                                        name
2689                                        value
2690                                    }
2691                                }
2692                            }
2693                        }
2694                        status
2695                    }
2696                    baselineStockUniverseId
2697                }
2698            }
2699        """
2700
2701        headers = {"Authorization": "ApiKey " + self.api_key}
2702        resp = requests.post(url, json={"query": qry}, headers=headers, params=self._request_params)
2703
2704        json_resp = resp.json()
2705        # graphql endpoints typically return 200 or 400 status codes, so we must
2706        # check if we have any errors, even with a 200
2707        if (resp.ok and "errors" in json_resp) or not resp.ok:
2708            error_msg = self._try_extract_error_code(resp)
2709            logger.error(error_msg)
2710            raise BoostedAPIException(
2711                (f"Failed to get hedge experiments: {resp.status_code=}; {error_msg=}")
2712            )
2713
2714        json_experiments = resp.json()["data"]["hedgeExperiments"]
2715        experiments = [HedgeExperiment.from_json_dict(exp_json) for exp_json in json_experiments]
2716        return experiments
def get_hedge_experiment_details(self, experiment_id: str):
2718    def get_hedge_experiment_details(self, experiment_id: str):
2719        url = self.base_uri + "/api/graphql"
2720        qry = """
2721            query getHedgeExperimentDetails($hedgeExperimentId: ID!) {
2722                hedgeExperiment(hedgeExperimentId: $hedgeExperimentId) {
2723                ...HedgeExperimentDetailsSummaryListFragment
2724                }
2725            }
2726
2727            fragment HedgeExperimentDetailsSummaryListFragment on HedgeExperiment {
2728                hedgeExperimentId
2729                experimentName
2730                userId
2731                config
2732                description
2733                experimentType
2734                lastCalculated
2735                lastModified
2736                status
2737                portfolioCalcStatus
2738                targetSecurities {
2739                    gbiId
2740                    security {
2741                        gbiId
2742                        symbol
2743                        name
2744                    }
2745                    weight
2746                }
2747                selectedModels {
2748                    id
2749                    name
2750                    stockUniverse {
2751                        name
2752                    }
2753                }
2754                hedgeExperimentScenarios {
2755                    ...experimentScenarioFragment
2756                }
2757                selectedDummyHedgeExperimentModels {
2758                    id
2759                    name
2760                    stockUniverse {
2761                        name
2762                    }
2763                }
2764                targetPortfolios {
2765                    portfolioId
2766                }
2767                baselineModel {
2768                    id
2769                    name
2770
2771                }
2772                baselineScenario {
2773                    hedgeExperimentScenarioId
2774                    scenarioName
2775                    description
2776                    portfolioSettingsJson
2777                    hedgeExperimentPortfolios {
2778                        portfolio {
2779                            id
2780                            name
2781                            modelId
2782                            performanceGridHeader
2783                            performanceGrid
2784                            status
2785                            tearSheet {
2786                                groupName
2787                                members {
2788                                    name
2789                                    value
2790                                }
2791                            }
2792                        }
2793                    }
2794                    status
2795                }
2796                baselineStockUniverseId
2797            }
2798
2799            fragment experimentScenarioFragment on HedgeExperimentScenario {
2800                hedgeExperimentScenarioId
2801                scenarioName
2802                status
2803                description
2804                portfolioSettingsJson
2805                hedgeExperimentPortfolios {
2806                    portfolio {
2807                        id
2808                        name
2809                        modelId
2810                        performanceGridHeader
2811                        performanceGrid
2812                        status
2813                        tearSheet {
2814                            groupName
2815                            members {
2816                                name
2817                                value
2818                            }
2819                        }
2820                    }
2821                }
2822            }
2823        """
2824        headers = {"Authorization": "ApiKey " + self.api_key}
2825        resp = requests.post(
2826            url,
2827            json={"query": qry, "variables": {"hedgeExperimentId": experiment_id}},
2828            headers=headers,
2829            params=self._request_params,
2830        )
2831
2832        json_resp = resp.json()
2833        # graphql endpoints typically return 200 or 400 status codes, so we must
2834        # check if we have any errors, even with a 200
2835        if (resp.ok and "errors" in json_resp) or not resp.ok:
2836            error_msg = self._try_extract_error_code(resp)
2837            logger.error(error_msg)
2838            raise BoostedAPIException(
2839                (
2840                    f"Failed to get hedge experiment results for {experiment_id=}: "
2841                    f"{resp.status_code=}; {error_msg=}"
2842                )
2843            )
2844
2845        json_exp_results = json_resp["data"]["hedgeExperiment"]
2846        if json_exp_results is None:
2847            return None  # issued a request with a non-existent experiment_id
2848        exp_results = HedgeExperimentDetails.from_json_dict(json_exp_results)
2849        return exp_results
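
A retrieval sketch; the experiment UUID is a placeholder:

    experiments = client.get_hedge_experiments()  # list of HedgeExperiment
    details = client.get_hedge_experiment_details("hedge-experiment-uuid")
    if details is None:
        print("no experiment with that id")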
def get_portfolio_performance(self, portfolio_id: str, start_date: Optional[datetime.date], end_date: Optional[datetime.date], daily_returns: bool) -> pd.DataFrame:
2851    def get_portfolio_performance(
2852        self,
2853        portfolio_id: str,
2854        start_date: Optional[datetime.date],
2855        end_date: Optional[datetime.date],
2856        daily_returns: bool,
2857    ) -> pd.DataFrame:
2858        """
2859        Get performance data for a portfolio.
2860
2861        Parameters
2862        ----------
2863        portfolio_id: str
2864            UUID corresponding to the portfolio in question.
2865        start_date: datetime.date
2866            Starting cutoff date to filter performance data
2867        end_date: datetime.date
2868            Ending cutoff date to filter performance data
2869        daily_returns: bool
2870            Flag indicating whether to add a new column with the daily return pct calculated
2871
2872        Returns
2873        -------
2874        pd.DataFrame object
2875            Portfolio and benchmark performance.
2876            -index:
2877                "date": pd.DatetimeIndex
2878            -columns:
2879                "benchmark": benchmark performance, % return
2880                "turnover": portfolio turnover, % of equity
2881                "portfolio": return since beginning of portfolio, % return
2882                "daily_returns": daily percent change in value of the portfolio, % return
2883                                (this column is optional and depends on the daily_returns flag)
2884        """
2885        url = f"{self.base_uri}/api/graphql"
2886        qry = """
2887            query getPortfolioPerformance($portfolioId: ID!) {
2888                portfolio(id: $portfolioId) {
2889                    id
2890                    modelId
2891                    name
2892                    status
2893                    performance {
2894                        benchmark
2895                        date
2896                        turnover
2897                        value
2898                    }
2899                }
2900            }
2901        """
2902
2903        headers = {"Authorization": "ApiKey " + self.api_key}
2904        resp = requests.post(
2905            url,
2906            json={"query": qry, "variables": {"portfolioId": portfolio_id}},
2907            headers=headers,
2908            params=self._request_params,
2909        )
2910
2911        json_resp = resp.json()
2912        # the webserver returns an error for non-ready portfolios, so we have to check
2913        # for this prior to the error check below
2914        pf = json_resp["data"].get("portfolio")
2915        if pf is not None and pf["status"] != "READY":
2916            return pd.DataFrame()
2917
2918        # graphql endpoints typically return 200 or 400 status codes, so we must
2919        # check if we have any errors, even with a 200
2920        if (resp.ok and "errors" in json_resp) or not resp.ok:
2921            error_msg = self._try_extract_error_code(resp)
2922            logger.error(error_msg)
2923            raise BoostedAPIException(
2924                (
2925                    f"Failed to get portfolio performance for {portfolio_id=}: "
2926                    f"{resp.status_code=}; {error_msg=}"
2927                )
2928            )
2929
2930        perf = json_resp["data"]["portfolio"]["performance"]
2931        df = pd.DataFrame(perf).set_index("date").rename(columns={"value": "portfolio"})
2932        df.index = pd.to_datetime(df.index)
2933        if daily_returns:
2934            df["daily_returns"] = pd.to_numeric(df["portfolio"]).pct_change()
2935            df = df.dropna(subset=["daily_returns"])
2936        if start_date:
2937            df = df[df.index >= pd.to_datetime(start_date)]
2938        if end_date:
2939            df = df[df.index <= pd.to_datetime(end_date)]
2940        return df.astype(float)
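
A usage sketch; the portfolio UUID is a placeholder. An empty frame means the portfolio is not yet READY:

    import datetime

    perf = client.get_portfolio_performance(
        "portfolio-uuid",
        start_date=datetime.date(2023, 1, 1),
        end_date=datetime.date(2023, 12, 31),
        daily_returns=True,
    )
    if not perf.empty:
        print(perf[["portfolio", "benchmark", "daily_returns"]].tail())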

def get_portfolio_factors(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2949    def get_portfolio_factors(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2950        url = f"{self.base_uri}/api/analysis/factors/{model_id}/{portfolio_id}"
2951        headers = {"Authorization": "ApiKey " + self.api_key}
2952        resp = requests.get(url, headers=headers, params=self._request_params)
2953
2954        json_resp = resp.json()
2955        if (resp.ok and "errors" in json_resp) or not resp.ok:
2956            error_msg = json_resp["errors"][0]
2957            if self._is_portfolio_still_running(error_msg):
2958                return pd.DataFrame()
2959            logger.error(error_msg)
2960            raise BoostedAPIException(
2961                (
2962                    f"Failed to get portfolio factors for {portfolio_id=}: "
2963                    f"{resp.status_code=}; {error_msg=}"
2964                )
2965            )
2966
2967        df = pd.DataFrame(json_resp["data"], columns=json_resp["header_row"])
2968
2969        def to_lower_snake_case(s):  # why are we linting lambdas? :(
2970            return "_".join(w.lower() for w in s.split(" "))
2971
2972        df = df.rename(columns={old: to_lower_snake_case(old) for old in df.columns}).set_index(
2973            "date"
2974        )
2975        df.index = pd.to_datetime(df.index)
2976        return df
def get_portfolio_volatility(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2978    def get_portfolio_volatility(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
2979        url = f"{self.base_uri}/api/analysis/volatility_rolling/{model_id}/{portfolio_id}"
2980        headers = {"Authorization": "ApiKey " + self.api_key}
2981        resp = requests.get(url, headers=headers, params=self._request_params)
2982
2983        json_resp = resp.json()
2984        if (resp.ok and "errors" in json_resp) or not resp.ok:
2985            error_msg = json_resp["errors"][0]
2986            if self._is_portfolio_still_running(error_msg):
2987                return pd.DataFrame()
2988            logger.error(error_msg)
2989            raise BoostedAPIException(
2990                (
2991                    f"Failed to get portfolio volatility for {portfolio_id=}: "
2992                    f"{resp.status_code=}; {error_msg=}"
2993                )
2994            )
2995
2996        df = pd.DataFrame(json_resp["data"], columns=json_resp["headerRow"])
2997        df = df.rename(
2998            columns={old: old.lower().replace("avg", "avg_") for old in df.columns}  # type: ignore
2999        ).set_index("date")
3000        df.index = pd.to_datetime(df.index)
3001        return df
def get_portfolio_holdings(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
3003    def get_portfolio_holdings(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
3004        url = f"{self.base_uri}/api/models/{model_id}/{portfolio_id}/basket-data"
3005        headers = {"Authorization": "ApiKey " + self.api_key}
3006        resp = requests.get(url, headers=headers, params=self._request_params)
3007
3008        # this is a classic abuse of try/except as control flow: we try to get json body
3009        # from the response so that we can error-check. if this fails, we assume we have
3010        # a legit text response (corresponding to the csv data we care about)
3011        try:
3012            json_resp = resp.json()
3013        except json.decoder.JSONDecodeError:
3014            df = pd.read_csv(io.StringIO(resp.text), header=[0])
3015        else:
3016            error_msg = json_resp["errors"][0]
3017            if self._is_portfolio_still_running(error_msg):
3018                return pd.DataFrame()
3019            else:
3020                logger.error(error_msg)
3021                raise BoostedAPIException(
3022                    (
3023                        f"Failed to get portfolio holdings for {portfolio_id=}: "
3024                        f"{resp.status_code=}; {error_msg=}"
3025                    )
3026                )
3027
3028        df = df.rename(columns={old: old.lower() for old in df.columns}).set_index("date")
3029        df.index = pd.to_datetime(df.index)
3030        return df
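
The three portfolio-analysis getters share a pattern: each returns an empty DataFrame while the portfolio is still running, so callers can poll. A sketch with placeholder UUIDs:

    factors = client.get_portfolio_factors("model-uuid", "portfolio-uuid")
    volatility = client.get_portfolio_volatility("model-uuid", "portfolio-uuid")
    holdings = client.get_portfolio_holdings("model-uuid", "portfolio-uuid")
    if not holdings.empty:
        print(holdings.tail())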
def getStockDataTableForDate(self, model_id: str, portfolio_id: str, date: datetime.date) -> pd.DataFrame:
3032    def getStockDataTableForDate(
3033        self, model_id: str, portfolio_id: str, date: datetime.date
3034    ) -> pd.DataFrame:
3035        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3036
3037        url_base = f"{self.base_uri}/api/analysis"
3038        url_params = f"{model_id}/{portfolio_id}"
3039        formatted_date = date.strftime("%Y-%m-%d")
3040
3041        stock_prices_url = f"{url_base}/stock-prices/{url_params}/{formatted_date}"
3042        stock_factors_url = f"{url_base}/stock-factors/{url_params}/date/{formatted_date}"
3043
3044        prices_params = {"useTicker": "false", "useCurrentSignals": "true"}
3045        factors_param = {"useTicker": "false", "useCurrentSignals": "true"}
3046
3047        prices_resp = requests.get(
3048            stock_prices_url, headers=headers, params=prices_params, **self._request_params
3049        )
3050        factors_resp = requests.get(
3051            stock_factors_url, headers=headers, params=factors_param, **self._request_params
3052        )
3053
3054        frames = []
3055        gbi_ids = set()
3056        for res in (prices_resp, factors_resp):
3057            if not res.ok:
3058                error_msg = self._try_extract_error_code(res)
3059                logger.error(error_msg)
3060                raise BoostedAPIException(
3061                    (
3062                        f"Failed to fetch stock data table for model {model_id}"
3063                        f" (it's possible no data is present for the given date: {date})."
3064                        f" Error message: {error_msg}"
3065                    )
3066                )
3067            result = res.json()
3068            df = pd.DataFrame(result)
3069            gbi_ids.update(df.columns.to_list())
3070            frames.append(pd.DataFrame(result))
3071
3072        all_gbiid_df = pd.concat(frames)
3073
3074        # Get the metadata of all GBI IDs
3075        gbiid_metadata_res = self._get_graphql(
3076            query=graphql_queries.GET_SEC_INFO_QRY, variables={"ids": [int(x) for x in gbi_ids]}
3077        )
3078        # Build a DF of metadata x GBI IDs
3079        gbiid_metadata_df = pd.DataFrame(
3080            {str(x["gbiId"]): x for x in gbiid_metadata_res["data"]["securities"]}
3081        )
3082        # Slice metadata we care. We'll drop "symbol" at the end.
3083        isin_country_currency_df = gbiid_metadata_df.loc[["isin", "country", "currency", "symbol"]]
3084        # Concatenate metadata to the existing stock data DF
3085        all_gbiid_with_metadata_df = pd.concat([all_gbiid_df, isin_country_currency_df])
3086        gbiid_with_symbol_df = all_gbiid_with_metadata_df.loc[
3087            :, all_gbiid_with_metadata_df.loc["symbol"].notna()
3088        ]
3089        renamed_df = gbiid_with_symbol_df.rename(
3090            index={"isin": "ISIN"}, columns=gbiid_with_symbol_df.loc["symbol"].to_dict()
3091        )
3092        output_df = renamed_df.drop(index=["symbol"])
3093        return output_df
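
A usage sketch; the UUIDs are placeholders:

    import datetime

    table = client.getStockDataTableForDate(
        "model-uuid", "portfolio-uuid", datetime.date(2023, 6, 30)
    )
    # Columns are ticker symbols; the index holds the price/factor rows plus
    # "ISIN", "country", and "currency" metadata rows appended by the method.
    print(table.loc["ISIN"])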
def add_hedge_experiment_scenario(self, experiment_id: str, scenario_name: str, scenario_settings: PortfolioSettings, run_scenario_immediately: bool) -> HedgeExperimentScenario:
3095    def add_hedge_experiment_scenario(
3096        self,
3097        experiment_id: str,
3098        scenario_name: str,
3099        scenario_settings: PortfolioSettings,
3100        run_scenario_immediately: bool,
3101    ) -> HedgeExperimentScenario:
3102        add_scenario_input = {
3103            "hedgeExperimentId": experiment_id,
3104            "scenarioName": scenario_name,
3105            "portfolioSettingsJson": str(scenario_settings),
3106            "runExperimentOnScenario": run_scenario_immediately,
3107            "createDefaultPortfolio": "false",
3108        }
3109        qry = """
3110            mutation addHedgeExperimentScenario(
3111                $input: AddHedgeExperimentScenarioInput!
3112            ) {
3113                addHedgeExperimentScenario(input: $input) {
3114                    hedgeExperimentScenario {
3115                        hedgeExperimentScenarioId
3116                        scenarioName
3117                        description
3118                        portfolioSettingsJson
3119                    }
3120                }
3121            }
3122
3123        """
3124
3125        url = f"{self.base_uri}/api/graphql"
3126
3127        resp = requests.post(
3128            url,
3129            headers={"Authorization": "ApiKey " + self.api_key},
3130            json={"query": qry, "variables": {"input": add_scenario_input}},
3131        )
3132
3133        json_resp = resp.json()
3134        if (resp.ok and "errors" in json_resp) or not resp.ok:
3135            error_msg = self._try_extract_error_code(resp)
3136            logger.error(error_msg)
3137            raise BoostedAPIException(
3138                (f"Failed to add scenario: {resp.status_code=}; {error_msg=}")
3139            )
3140
3141        scenario_dict = json_resp["data"]["addHedgeExperimentScenario"]["hedgeExperimentScenario"]
3142        if scenario_dict is None:
3143            raise BoostedAPIException(
3144                "Failed to add scenario, likely due to bad experiment id or api key"
3145            )
3146        s = HedgeExperimentScenario.from_json_dict(scenario_dict)
3147        return s
def create_hedge_experiment(self, name: str, description: str, experiment_type: Literal['HEDGE', 'MIMIC'], target_securities: Union[Dict[GbiIdSecurity, float], str, None]) -> HedgeExperiment:
3156    def create_hedge_experiment(
3157        self,
3158        name: str,
3159        description: str,
3160        experiment_type: hedge_experiment_type,
3161        target_securities: Union[Dict[GbiIdSecurity, float], str, None],
3162    ) -> HedgeExperiment:
3163        # we don't pass target_securities here (as much as id like to) because the
3164        # graphql input doesn't support it at this point
3165
3166        # note that this query returns a lot of null fields at this point, but
3167        # they are necessary for building a HE.
3168        create_qry = """
3169            mutation createDraftMutation($input: CreateHedgeExperimentDraftInput!) {
3170                createHedgeExperimentDraft(input: $input) {
3171                    hedgeExperiment {
3172                        hedgeExperimentId
3173                        experimentName
3174                        userId
3175                        config
3176                        description
3177                        experimentType
3178                        lastCalculated
3179                        lastModified
3180                        status
3181                        portfolioCalcStatus
3182                        targetSecurities {
3183                            gbiId
3184                            security {
3185                                gbiId
3186                                name
3187                                symbol
3188                            }
3189                            weight
3190                        }
3191                        baselineModel {
3192                            id
3193                            name
3194                        }
3195                        baselineScenario {
3196                            hedgeExperimentScenarioId
3197                            scenarioName
3198                            description
3199                            portfolioSettingsJson
3200                            hedgeExperimentPortfolios {
3201                                portfolio {
3202                                    id
3203                                    name
3204                                    modelId
3205                                    performanceGridHeader
3206                                    performanceGrid
3207                                    status
3208                                    tearSheet {
3209                                        groupName
3210                                        members {
3211                                            name
3212                                            value
3213                                        }
3214                                    }
3215                                }
3216                            }
3217                            status
3218                        }
3219                        baselineStockUniverseId
3220                    }
3221                }
3222            }
3223        """
3224
3225        create_input: Dict[str, Any] = {
3226            "name": name,
3227            "experimentType": experiment_type,
3228            "description": description,
3229        }
3230        if isinstance(target_securities, dict):
3231            create_input["setTargetSecurities"] = [
3232                {"gbiId": sec.gbi_id, "weight": weight}
3233                for (sec, weight) in target_securities.items()
3234            ]
3235        elif isinstance(target_securities, str):
3236            create_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3237        elif target_securities is None:
3238            pass
3239        else:
3240            raise TypeError(
3241                "Expected value of type Union[Dict[GbiIdSecurity, str], str] for "
3242                f"argument 'target_securities'; got {type(target_securities)}"
3243            )
3244        resp = requests.post(
3245            f"{self.base_uri}/api/graphql",
3246            json={"query": create_qry, "variables": {"input": create_input}},
3247            headers={"Authorization": "ApiKey " + self.api_key},
3248            params=self._request_params,
3249        )
3250
3251        json_resp = resp.json()
3252        if (resp.ok and "errors" in json_resp) or not resp.ok:
3253            error_msg = self._try_extract_error_code(resp)
3254            logger.error(error_msg)
3255            raise BoostedAPIException(
3256                (f"Failed to create hedge experiment: {resp.status_code=}; {error_msg=}")
3257            )
3258
3259        exp_dict = json_resp["data"]["createHedgeExperimentDraft"]["hedgeExperiment"]
3260        experiment = HedgeExperiment.from_json_dict(exp_dict)
3261        return experiment
def modify_hedge_experiment(self, experiment_id: str, name: Optional[str] = None, description: Optional[str] = None, experiment_type: Optional[Literal['HEDGE', 'MIMIC']] = None, target_securities: Union[Dict[GbiIdSecurity, float], str, None] = None, model_ids: Optional[List[str]] = None, stock_universe_ids: Optional[List[str]] = None, create_default_scenario: bool = True, baseline_model_id: Optional[str] = None, baseline_stock_universe_id: Optional[str] = None, baseline_portfolio_settings: Optional[str] = None) -> HedgeExperiment:
3263    def modify_hedge_experiment(
3264        self,
3265        experiment_id: str,
3266        name: Optional[str] = None,
3267        description: Optional[str] = None,
3268        experiment_type: Optional[hedge_experiment_type] = None,
3269        target_securities: Union[Dict[GbiIdSecurity, float], str, None] = None,
3270        model_ids: Optional[List[str]] = None,
3271        stock_universe_ids: Optional[List[str]] = None,
3272        create_default_scenario: bool = True,
3273        baseline_model_id: Optional[str] = None,
3274        baseline_stock_universe_id: Optional[str] = None,
3275        baseline_portfolio_settings: Optional[str] = None,
3276    ) -> HedgeExperiment:
3277        mod_qry = """
3278            mutation modifyHedgeExperimentDraft(
3279                $input: ModifyHedgeExperimentDraftInput!
3280            ) {
3281                modifyHedgeExperimentDraft(input: $input) {
3282                    hedgeExperiment {
3283                    ...HedgeExperimentSelectedSecuritiesPageFragment
3284                    }
3285                }
3286            }
3287
3288            fragment HedgeExperimentSelectedSecuritiesPageFragment on HedgeExperiment {
3289                hedgeExperimentId
3290                experimentName
3291                userId
3292                config
3293                description
3294                experimentType
3295                lastCalculated
3296                lastModified
3297                status
3298                portfolioCalcStatus
3299                targetSecurities {
3300                    gbiId
3301                    security {
3302                        gbiId
3303                        name
3304                        symbol
3305                    }
3306                    weight
3307                }
3308                targetPortfolios {
3309                    portfolioId
3310                }
3311                baselineModel {
3312                    id
3313                    name
3314                }
3315                baselineScenario {
3316                    hedgeExperimentScenarioId
3317                    scenarioName
3318                    description
3319                    portfolioSettingsJson
3320                    hedgeExperimentPortfolios {
3321                        portfolio {
3322                            id
3323                            name
3324                            modelId
3325                            performanceGridHeader
3326                            performanceGrid
3327                            status
3328                            tearSheet {
3329                                groupName
3330                                members {
3331                                    name
3332                                    value
3333                                }
3334                            }
3335                        }
3336                    }
3337                    status
3338                }
3339                baselineStockUniverseId
3340            }
3341        """
3342        mod_input = {
3343            "hedgeExperimentId": experiment_id,
3344            "createDefaultScenario": create_default_scenario,
3345        }
3346        if name is not None:
3347            mod_input["newExperimentName"] = name
3348        if description is not None:
3349            mod_input["newExperimentDescription"] = description
3350        if experiment_type is not None:
3351            mod_input["newExperimentType"] = experiment_type
3352        if model_ids is not None:
3353            mod_input["setSelectdModels"] = model_ids
3354        if stock_universe_ids is not None:
3355            mod_input["selectedStockUniverseIds"] = stock_universe_ids
3356        if baseline_model_id is not None:
3357            mod_input["setBaselineModel"] = baseline_model_id
3358        if baseline_stock_universe_id is not None:
3359            mod_input["setBaselineStockUniverse"] = baseline_stock_universe_id
3360        if baseline_portfolio_settings is not None:
3361            mod_input["setBaselinePortfolioSettings"] = baseline_portfolio_settings
3362        # note that the behaviors bound to these data are mutually exclusive,
3363        # and its possible the opposite was set earlier in the DRAFT phase
3364        # of experiment creation, so when setting one, we must unset the other
3365        if isinstance(target_securities, dict):
3366            mod_input["setTargetSecurities"] = [
3367                {"gbiId": sec.gbi_id, "weight": weight}
3368                for (sec, weight) in target_securities.items()
3369            ]
3370            mod_input["setTargetPortfolios"] = None
3371        elif isinstance(target_securities, str):
3372            mod_input["setTargetPortfolios"] = [{"portfolioId": target_securities}]
3373            mod_input["setTargetSecurities"] = None
3374        elif target_securities is None:
3375            pass
3376        else:
3377            raise TypeError(
3378                "Expected value of type Union[Dict[GbiIdSecurity, str], str] "
3379                f"for argument 'target_securities'; got {type(target_securities)}"
3380            )
3381
3382        resp = requests.post(
3383            f"{self.base_uri}/api/graphql",
3384            json={"query": mod_qry, "variables": {"input": mod_input}},
3385            headers={"Authorization": "ApiKey " + self.api_key},
3386            params=self._request_params,
3387        )
3388
3389        json_resp = resp.json()
3390        if (resp.ok and "errors" in json_resp) or not resp.ok:
3391            error_msg = self._try_extract_error_code(resp)
3392            logger.error(error_msg)
3393            raise BoostedAPIException(
3394                (
3395                    f"Failed to modify hedge experiment in preparation for start {experiment_id=}: "
3396                    f"{resp.status_code=}; {error_msg=}"
3397                )
3398            )
3399
3400        exp_dict = json_resp["data"]["modifyHedgeExperimentDraft"]["hedgeExperiment"]
3401        experiment = HedgeExperiment.from_json_dict(exp_dict)
3402        return experiment
def start_hedge_experiment(self, experiment_id: str, *scenario_ids: str) -> HedgeExperiment:
3404    def start_hedge_experiment(self, experiment_id: str, *scenario_ids: str) -> HedgeExperiment:
3405        start_qry = """
3406            mutation startHedgeExperiment($input: StartHedgeExperimentInput!) {
3407                startHedgeExperiment(input: $input) {
3408                    hedgeExperiment {
3409                        hedgeExperimentId
3410                        experimentName
3411                        userId
3412                        config
3413                        description
3414                        experimentType
3415                        lastCalculated
3416                        lastModified
3417                        status
3418                        portfolioCalcStatus
3419                        targetSecurities {
3420                            gbiId
3421                            security {
3422                                gbiId
3423                                name
3424                                symbol
3425                            }
3426                            weight
3427                        }
3428                        targetPortfolios {
3429                            portfolioId
3430                        }
3431                        baselineModel {
3432                            id
3433                            name
3434                        }
3435                        baselineScenario {
3436                            hedgeExperimentScenarioId
3437                            scenarioName
3438                            description
3439                            portfolioSettingsJson
3440                            hedgeExperimentPortfolios {
3441                                portfolio {
3442                                    id
3443                                    name
3444                                    modelId
3445                                    performanceGridHeader
3446                                    performanceGrid
3447                                    status
3448                                    tearSheet {
3449                                        groupName
3450                                        members {
3451                                            name
3452                                            value
3453                                        }
3454                                    }
3455                                }
3456                            }
3457                            status
3458                        }
3459                        baselineStockUniverseId
3460                    }
3461                }
3462            }
3463        """
3464        start_input: Dict[str, Any] = {"hedgeExperimentId": experiment_id}
3465        if len(scenario_ids) > 0:
3466            start_input["hedgeExperimentScenarioIds"] = list(scenario_ids)
3467
3468        resp = requests.post(
3469            f"{self.base_uri}/api/graphql",
3470            json={"query": start_qry, "variables": {"input": start_input}},
3471            headers={"Authorization": "ApiKey " + self.api_key},
3472            params=self._request_params,
3473        )
3474
3475        json_resp = resp.json()
3476        if (resp.ok and "errors" in json_resp) or not resp.ok:
3477            error_msg = self._try_extract_error_code(resp)
3478            logger.error(error_msg)
3479            raise BoostedAPIException(
3480                (
3481                    f"Failed to start hedge experiment {experiment_id=}: "
3482                    f"{resp.status_code=}; {error_msg=}"
3483                )
3484            )
3485
3486        exp_dict = json_resp["data"]["startHedgeExperiment"]["hedgeExperiment"]
3487        experiment = HedgeExperiment.from_json_dict(exp_dict)
3488        return experiment
def delete_hedge_experiment(self, experiment_id: str) -> bool:
3490    def delete_hedge_experiment(self, experiment_id: str) -> bool:
3491        delete_qry = """
3492            mutation($input: DeleteHedgeExperimentsInput!) {
3493                deleteHedgeExperiments(input: $input) {
3494                    success
3495                }
3496            }
3497        """
3498        delete_input = {"hedgeExperimentIds": [experiment_id]}
3499        resp = requests.post(
3500            f"{self.base_uri}/api/graphql",
3501            json={"query": delete_qry, "variables": {"input": delete_input}},
3502            headers={"Authorization": "ApiKey " + self.api_key},
3503            params=self._request_params,
3504        )
3505
3506        json_resp = resp.json()
3507        if (resp.ok and "errors" in json_resp) or not resp.ok:
3508            error_msg = self._try_extract_error_code(resp)
3509            logger.error(error_msg)
3510            raise BoostedAPIException(
3511                (
3512                    f"Failed to delete hedge experiment {experiment_id=}: "
3513                    + f"status_code={resp.status_code}; error_msg={error_msg}"
3514                )
3515            )
3516
3517        return json_resp["data"]["deleteHedgeExperiments"]["success"]
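
A rough end-to-end sketch of the hedge-experiment lifecycle built from the calls above. All UUIDs are placeholders, and the PortfolioSettings value is assumed to be constructed as described in boosted.api.api_type:

    from boosted.api.api_type import PortfolioSettings

    exp = client.create_hedge_experiment(
        name="Example hedge",
        description="draft created via the API",
        experiment_type="HEDGE",
        target_securities="portfolio-uuid",  # or a {GbiIdSecurity: weight} dict
    )
    experiment_id = "hedge-experiment-uuid"  # taken from the returned HedgeExperiment
    settings: PortfolioSettings = ...  # assumed: built per the api_type documentation
    client.add_hedge_experiment_scenario(
        experiment_id, "Scenario A", settings, run_scenario_immediately=False
    )
    client.start_hedge_experiment(experiment_id)  # optionally pass scenario ids
    client.delete_hedge_experiment(experiment_id)  # returns True on success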
def create_hedge_basket_position_bounds_from_csv(self, filepath: str, name: str, description: Optional[str], mapping_result_filepath: Optional[str]) -> str:
3519    def create_hedge_basket_position_bounds_from_csv(
3520        self,
3521        filepath: str,
3522        name: str,
3523        description: Optional[str],
3524        mapping_result_filepath: Optional[str],
3525    ) -> str:
3526        DATE = "Date"
3527        ISIN = "ISIN"
3528        COUNTRY = "Country"
3529        CURRENCY = "Currency"
3530        LOWER_BOUND = "Lower Bound"
3531        UPPER_BOUND = "Upper Bound"
3532        supported_columns = {
3533            DATE,
3534            ISIN,
3535            COUNTRY,
3536            CURRENCY,
3537            LOWER_BOUND,
3538            UPPER_BOUND,
3539        }
3540        required_columns = {ISIN, LOWER_BOUND, UPPER_BOUND}
3541
3542        try:
3543            df: pd.DataFrame = pd.read_csv(filepath, parse_dates=True)
3544        except Exception as e:
3545            raise BoostedAPIException(f"Error reading {filepath=}: {e}")
3546
3547        columns = set(df.columns)
3548
3549        # First perform basic data validation
3550        missing_required_columns = required_columns - columns
3551        if missing_required_columns:
3552            raise BoostedAPIException(
3553                f"The following required columns are missing: {missing_required_columns}"
3554            )
3555        extra_columns = columns - supported_columns
3556        if extra_columns:
3557            logger.warning(
3558                f"The following columns are unsupported and will be ignored: {extra_columns}"
3559            )
3560        try:
3561            df[LOWER_BOUND] = df[LOWER_BOUND].astype(float)
3562            df[UPPER_BOUND] = df[UPPER_BOUND].astype(float)
3563            df[ISIN] = df[ISIN].astype(str)
3564        except Exception as e:
3565            raise BoostedAPIException(f"Column datatypes are incorrect: {e}")
3566        lb_gt_ub = df[df[LOWER_BOUND] > df[UPPER_BOUND]]
3567        if not lb_gt_ub.empty:
3568            raise BoostedAPIException(
3569                f"Lower Bound must be <= Upper Bound, but these are not: {lb_gt_ub[ISIN].tolist()}"
3570            )
3571        out_of_range = df[
3572            (
3573                (df[LOWER_BOUND] < 0)
3574                | (df[LOWER_BOUND] > 1)
3575                | (df[UPPER_BOUND] < 0)
3576                | (df[UPPER_BOUND] > 1)
3577            )
3578        ]
3579        if not out_of_range.empty:
3580            raise BoostedAPIException("Lower Bound and Upper Bound values must be in range [0, 1]")
3581
3582        # Now map the security info into GBI IDs
3583        rows = list(df.to_dict(orient="index").values())
3584        sec_data_list = self.getGbiIdFromIdentCountryCurrencyDate(
3585            ident_country_currency_dates=[
3586                DateIdentCountryCurrency(
3587                    date=row.get(DATE, datetime.date.today().isoformat()),
3588                    identifier=row.get(ISIN),
3589                    id_type=ColumnSubRole.ISIN,
3590                    country=row.get(COUNTRY),
3591                    currency=row.get(CURRENCY),
3592                )
3593                for row in rows
3594            ]
3595        )
3596
3597        # Now take each row and its gbi id mapping, and create the bounds list
3598        bounds = []
3599        for row, sec_data in zip(rows, sec_data_list):
3600            if sec_data is None:
3601                logger.warning(f"Failed to map {row[ISIN]}, skipping this security.")
3602            else:
3603                bounds.append(
3604                    {"gbi_id": str(sec_data.gbi_id), "lb": row[LOWER_BOUND], "ub": row[UPPER_BOUND]}
3605                )
3606
3607                # Add security metadata to see the mapping
3608                row["Mapped GBI ID"] = sec_data.gbi_id
3609                row[f"Mapped {ISIN}"] = sec_data.isin_info.identifier
3610                row[f"Mapped {COUNTRY}"] = sec_data.isin_info.country
3611                row[f"Mapped {CURRENCY}"] = sec_data.isin_info.currency
3612                row["Mapped Ticker"] = sec_data.ticker
3613                row["Mapped Company Name"] = sec_data.company_name
3614
3615        # Call endpoint to create the bounds settings template
3616        qry = """
3617              mutation CreatePartialStrategyTemplate(
3618                $portfolioSettingsKey: String!
3619                $partialSettings: String!
3620                $name: String!
3621                $description: String
3622              ) {
3623                createPartialStrategyTemplate(
3624                  portfolioSettingsKey: $portfolioSettingsKey
3625                  partialSettings: $partialSettings
3626                  name: $name
3627                  description: $description
3628                )
3629              }
3630            """
3631        variables = {
3632            "portfolioSettingsKey": "basketTrading.positionSizeBounds",
3633            "partialSettings": json.dumps(bounds),
3634            "name": name,
3635            "description": description,
3636        }
3637        resp = self._get_graphql(qry, variables=variables)
3638
3639        # Write mapped csv for reference
3640        if mapping_result_filepath is not None:
3641            pd.DataFrame(rows).to_csv(mapping_result_filepath)
3642
3643        return resp["data"]["createPartialStrategyTemplate"]
3645    def get_hit_rate_file(self, model_id: str, portfolio_id: str, file_key: str) -> dict:
3646        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate-file/"
3647        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3648        req_json = {"model_id": model_id, "portfolio_id": portfolio_id, "file_key": file_key}
3649        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3650        if not res.ok:
3651            error_msg = self._try_extract_error_code(res)
3652            logger.error(error_msg)
3653            raise BoostedAPIException(f"Failed to get Hit Rate file: {error_msg}")
3654
3655        data = res.json()
3656        return data
3658    def get_hit_rate_with_securities(
3659        self,
3660        model_id: str,
3661        portfolio_id: str,
3662        meet_all_conditions: bool,
3663        securities: List[str],
3664        countries: List[str],
3665        sectors: List[str],
3666        start_date: Optional[BoostedDate],
3667        end_date: Optional[BoostedDate],
3668    ) -> dict:
3669
3670        start_date, end_date = get_date_range(start_date=start_date, end_date=end_date)
3671        start_date, end_date = start_date.isoformat(), end_date.isoformat()
3672
3673        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"  # noqa f"http://0.0.0.0:8000{DAL_PA_ROUTE}/get-securities-hit-rate/"
3674        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3675        req_json = {
3676            "model_id": model_id,
3677            "portfolio_id": portfolio_id,
3678            "meet_all_conditions": meet_all_conditions,
3679            "securities": securities,
3680            "countries": countries,
3681            "sectors": sectors,
3682            "start_date": start_date,
3683            "end_date": end_date,
3684        }
3685        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3686
3687        if not res.ok:
3688            error_msg = self._try_extract_error_code(res)
3689            logger.error(error_msg)
3690            raise BoostedAPIException(f"Failed to get Hit Rate with securities: {error_msg}")
3691
3692        data = res.json()
3693        return data
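
A minimal usage sketch (illustrative only, not part of the source; the API key, ids, and filter values below are hypothetical placeholders, and dates may be datetime.date objects or ISO strings):

    client = BoostedClient(api_key="YOUR_API_KEY")  # hypothetical key
    hit_rate = client.get_hit_rate_with_securities(
        model_id="MODEL_UUID",  # hypothetical ids
        portfolio_id="PORTFOLIO_UUID",
        meet_all_conditions=True,
        securities=["AAPL", "MSFT"],
        countries=["USA"],
        sectors=["Information Technology"],
        start_date="2023-01-01",
        end_date="2023-12-31",
    )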
3695    def get_portfolio_accuracy(
3696        self,
3697        model_id: str,
3698        portfolio_id: str,
3699        start_date: Optional[BoostedDate] = None,
3700        end_date: Optional[BoostedDate] = None,
3701    ) -> dict:
3702        if start_date and end_date:
3703            validate_start_and_end_dates(start_date=start_date, end_date=end_date)
3704            start_date = convert_date(start_date)
3705            end_date = convert_date(end_date)
3706
3707        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
3708        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-hit-rate/"
3709        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3710        req_json = {"model_id": model_id, "portfolio_id": portfolio_id}
3711        if start_date and end_date:
3712            req_json["start_date"] = start_date.isoformat()
3713            req_json["end_date"] = end_date.isoformat()
3714        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3715
3716        if not res.ok:
3717            error_msg = self._try_extract_error_code(res)
3718            logger.error(error_msg)
3719            raise BoostedAPIException(f"Failed to get Hit Rate: {error_msg}")
3720
3721        data = res.json()
3722        return data
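
A sketch using the client from the earlier example (ids hypothetical; both dates may be omitted to use the server's default range):

    accuracy = client.get_portfolio_accuracy(
        model_id="MODEL_UUID",  # hypothetical ids
        portfolio_id="PORTFOLIO_UUID",
        start_date=datetime.date(2023, 1, 1),
        end_date=datetime.date(2023, 12, 31),
    )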
3724    def create_watchlist(self, name: str) -> str:
3725        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create/"
3726        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
3727        req_json = {"name": name}
3728        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
3729
3730        if not res.ok:
3731            error_msg = self._try_extract_error_code(res)
3732            logger.error(error_msg)
3733            raise BoostedAPIException(f"Failed to create watchlist: {error_msg}")
3734
3735        data = res.json()
3736        return data["watchlist_id"]
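
For example (name hypothetical), the returned value is the new watchlist's UUID:

    watchlist_id = client.create_watchlist(name="Tech Coverage")  # hypothetical name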
3865    def get_coverage_info(self, watchlist_id: str, portfolio_group_id: str) -> pd.DataFrame:
3866        # get securities list in watchlist
3867        watchlist_details = self.get_watchlist_details(watchlist_id)
3868        security_list = watchlist_details["targets"]
3869
3870        gbi_ids = [x["gbi_id"] for x in security_list]
3871
3872        gbi_data: Dict[Any, Dict] = {x: {} for x in gbi_ids}
3873
3874        # get security info ticker, name, industry etc
3875        sec_info = self._get_security_info(gbi_ids)
3876
3877        for sec in sec_info["data"]["securities"]:
3878            gbi_id = sec["gbiId"]
3879            for k in ["symbol", "name", "isin", "country", "currency"]:
3880                gbi_data[gbi_id][self._coverage_column_name_format(k)] = sec[k]
3881
3882            gbi_data[gbi_id][self._coverage_column_name_format("Sector")] = sec["sector"][
3883                "topParentName"
3884            ]
3885
3886        # get portfolios list in portfolio_Group
3887        portfolio_group = self.get_portfolio_group(portfolio_group_id)
3888        portfolio_ids = [x["portfolio_id"] for x in portfolio_group["portfolios"]]
3889        portfolio_info = {x["portfolio_id"]: x for x in portfolio_group["portfolios"]}
3890
3891        model_resp = self._get_models_for_portfolio(portfolio_ids=portfolio_ids)
3892        for portfolio in model_resp["data"]["portfolios"]:
3893            portfolio_info[portfolio["id"]].update(portfolio)
3894
3895        model_info = {
3896            x["modelId"]: portfolio_info[x["id"]] for x in model_resp["data"]["portfolios"]
3897        }
3898
3899        # model_ids and portfolio_ids are parallel arrays
3900        model_ids = [portfolio_info[x]["modelId"] for x in portfolio_ids]
3901
3902        # graphql: get watchlist analysis
3903        wl_analysis = self._get_watchlist_analysis(
3904            gbi_ids=gbi_ids,
3905            model_ids=model_ids,
3906            portfolio_ids=portfolio_ids,
3907            asof_date=datetime.date.today(),
3908        )
3909
3910        portfolio_gbi_data: Dict[Any, Dict] = {k: {} for k in portfolio_ids}
3911        for pi, v in portfolio_gbi_data.items():
3912            v.update({k: {} for k in gbi_data.keys()})
3913
3914        equity_explorer_date = wl_analysis["data"]["watchlistAnalysis"][0]["analysisDates"][0][
3915            "date"
3916        ]
3917        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3918            gbi_id = wla["gbiId"]
3919            gbi_data[gbi_id]["Composite Rating"] = wla["analysisDates"][0]["aggregateSignal"][
3920                "rating"
3921            ]
3922            gbi_data[gbi_id]["Composite Rating Delta"] = wla["analysisDates"][0]["aggregateSignal"][
3923                "ratingDelta"
3924            ]
3925
3926            for p in wla["analysisDates"][0]["portfoliosSignals"]:
3927                model_name = portfolio_info[p["portfolioId"]]["modelName"]
3928
3929                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3930                    model_name + self._coverage_column_name_format(": rank")
3931                ] = (p["rank"] + 1)
3932                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3933                    model_name + self._coverage_column_name_format(": rank delta")
3934                ] = (-1 * p["signalDelta"])
3935                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3936                    model_name + self._coverage_column_name_format(": rating")
3937                ] = p["rating"]
3938                portfolio_gbi_data[p["portfolioId"]][gbi_id][
3939                    model_name + self._coverage_column_name_format(": rating delta")
3940                ] = p["ratingDelta"]
3941
3942        neg_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3943        pos_rec: Dict[Any, Dict] = {k: {} for k in gbi_data.keys()}
3944        for wla in wl_analysis["data"]["watchlistAnalysis"]:
3945            gbi_id = wla["gbiId"]
3946
3947            for pid, signals in zip(portfolio_ids, wla["analysisDates"][0]["portfoliosSignals"]):
3948                model_name = portfolio_info[pid]["modelName"]
3949                neg_rec[gbi_id][
3950                    model_name + self._coverage_column_name_format(": negative recommendation")
3951                ] = signals["explainWeightNeg"]
3952                pos_rec[gbi_id][
3953                    model_name + self._coverage_column_name_format(": positive recommendation")
3954                ] = signals["explainWeightPos"]
3955
3956        # graphql: GetExcessReturn - slugging pct
3957        er_sp = self._get_excess_return(
3958            model_ids=model_ids, gbi_ids=gbi_ids, asof_date=equity_explorer_date
3959        )
3960
3961        for model in er_sp["data"]["models"]:
3962            model_name = model_info[model["id"]]["modelName"]
3963            for stat in model["equityExplorerData"]["equityExplorerSummaryStatistics"]:
3964                portfolioId = model_info[model["id"]]["id"]
3965                portfolio_gbi_data[portfolioId][int(stat["gbiId"])][
3966                    model_name + self._coverage_column_name_format(": slugging %")
3967                ] = (stat["ER"]["SP"]["sixMonthWindowOneMonthHorizon"] * 100)
3968
3969        # add rank, rating, slugging
3970        for pid, v in portfolio_gbi_data.items():
3971            for gbi_id, vv in v.items():
3972                gbi_data[gbi_id].update(vv)
3973
3974        # add neg/pos rec scores
3975        for rec in [neg_rec, pos_rec]:
3976            for k, v in rec.items():
3977                gbi_data[k].update(v)
3978
3979        df = pd.DataFrame.from_records([v for _, v in gbi_data.items()])
3980
3981        return df
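
A usage sketch (ids hypothetical); the returned DataFrame has one row per security in the watchlist, with rating, rank, and recommendation columns per model in the portfolio group:

    coverage_df = client.get_coverage_info(
        watchlist_id="WATCHLIST_UUID",  # hypothetical ids
        portfolio_group_id="GROUP_UUID",
    )
    print(coverage_df.head())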
3983    def get_coverage_csv(
3984        self, watchlist_id: str, portfolio_group_id: str, filepath: Optional[str] = None
3985    ) -> Optional[str]:
3986        """
3987        Converts the coverage contents to CSV format
3988        Parameters
3989        ----------
3990        watchlist_id: str
3991            UUID str identifying the coverage watchlist
3992        portfolio_group_id: str
3993            UUID str identifying the group of portfolios to use for analysis
3994        filepath: Optional[str]
3995            optional file path to write the CSV contents to
3996
3997        Returns:
3998        ----------
3999        None if filepath is provided, else the CSV contents as a string
4000        """
4001
4002        df = self.get_coverage_info(watchlist_id, portfolio_group_id)
4003
4004        return df.to_csv(filepath, index=False, float_format="%.4f")
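
For instance (ids and path hypothetical), write the report to disk or capture it as a string:

    client.get_coverage_csv("WATCHLIST_UUID", "GROUP_UUID", filepath="coverage.csv")
    csv_str = client.get_coverage_csv("WATCHLIST_UUID", "GROUP_UUID")  # no filepath: returns str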

4006    def get_watchlist_details(self, watchlist_id: str) -> Dict:
4007        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/details/"
4008        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4009        req_json = {"watchlist_id": watchlist_id}
4010        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4011
4012        if not res.ok:
4013            error_msg = self._try_extract_error_code(res)
4014            logger.error(error_msg)
4015            raise BoostedAPIException(f"Failed to get watchlist details: {error_msg}")
4016
4017        data = res.json()
4018        return data
4020    def create_watchlist_from_file(self, name: str, filepath: str) -> str:
4021        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/create_watchlist_from_file/"
4022        headers = {"Authorization": "ApiKey " + self.api_key}
4023
4024        with open(filepath, "rb") as fp:
4025            file_bytes = fp.read()
4026
4027        file_bytes_base64 = base64.b64encode(file_bytes).decode("ascii")
4028        json_req = {
4029            "content_type": mimetypes.guess_type(filepath)[0],
4030            "file_bytes_base64": file_bytes_base64,
4031            "name": name,
4032        }
4033
4034        res = requests.post(url, json=json_req, headers=headers)
4035
4036        if not res.ok:
4037            error_msg = self._try_extract_error_code(res)
4038            logger.error(error_msg)
4039            raise BoostedAPIException(f"Failed to create watchlist from file: {error_msg}")
4040
4041        data = res.json()
4042        return data["watchlist_id"]
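
A sketch (name and path hypothetical); the file's MIME type is guessed from its extension, so a recognized extension such as .csv should be used:

    watchlist_id = client.create_watchlist_from_file(
        name="Uploaded List",
        filepath="/path/to/securities.csv",  # hypothetical path
    )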
4044    def get_watchlists(self) -> List[Dict]:
4045        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/get_user_watchlists/"
4046        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4047        req_json: Dict = {}
4048        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4049
4050        if not res.ok:
4051            error_msg = self._try_extract_error_code(res)
4052            logger.error(error_msg)
4053            raise BoostedAPIException(f"Failed to get user watchlists: {error_msg}")
4054
4055        data = res.json()
4056        return data["watchlists"]
4058    def get_watchlist_contents(self, watchlist_id) -> Dict:
4059        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/contents/"
4060        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4061        req_json = {"watchlist_id": watchlist_id}
4062        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4063
4064        if not res.ok:
4065            error_msg = self._try_extract_error_code(res)
4066            logger.error(error_msg)
4067            raise BoostedAPIException(f"Failed to get watchlist contents: {error_msg}")
4068
4069        data = res.json()
4070        return data
4072    def get_watchlist_contents_as_csv(self, watchlist_id, filepath) -> None:
4073        data = self.get_watchlist_contents(watchlist_id)
4074        df = pd.DataFrame(data["contents"])
4075        df.to_csv(filepath, index=False)
4078    def add_securities_to_watchlist(
4079        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4080    ) -> Dict:
4081        # should we just make the arg lower? all caps has a flag-like feel to it
4082        id_type = identifier_type.lower()
4083        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/add_{id_type}s/"
4084        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4085        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4086        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4087
4088        if not res.ok:
4089            error_msg = self._try_extract_error_code(res)
4090            logger.error(error_msg)
4091            raise BoostedAPIException(f"Failed to add securities to watchlist: {error_msg}")
4092
4093        data = res.json()
4094        return data
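
For example, adding by ticker (ids hypothetical; identifier_type may also be "ISIN"):

    client.add_securities_to_watchlist(
        watchlist_id="WATCHLIST_UUID",  # hypothetical id
        identifiers=["AAPL", "MSFT"],
        identifier_type="TICKER",
    )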
4096    def remove_securities_from_watchlist(
4097        self, watchlist_id: str, identifiers: List[str], identifier_type: Literal["TICKER", "ISIN"]
4098    ) -> Dict:
4099        # should we just make the arg lower? all caps has a flag-like feel to it
4100        id_type = identifier_type.lower()
4101        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/remove_{id_type}s/"
4102        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4103        req_json = {"watchlist_id": watchlist_id, id_type: identifiers}
4104        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4105
4106        if not res.ok:
4107            error_msg = self._try_extract_error_code(res)
4108            logger.error(error_msg)
4109            raise BoostedAPIException(f"Failed to remove securities from watchlist: {error_msg}")
4110
4111        data = res.json()
4112        return data
4114    def get_portfolio_groups(
4115        self,
4116    ) -> Dict:
4117        """
4118        Parameters: None
4119
4120
4121        Returns:
4122        ----------
4123
4124        Dict:  {
4125        user_id: str
4126        portfolio_groups: List[PortfolioGroup]
4127        }
4128        where PortfolioGroup is defined as Dict {
4129        group_id: str
4130        group_name: str
4131        portfolios: List[PortfolioInGroup]
4132        }
4133        where PortfolioInGroup is defined as Dict {
4134        portfolio_id: str
4135        rank_in_group: Optional[int]
4136        }
4137        """
4138        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get"
4139        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4140        req_json: Dict = {}
4141        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4142
4143        if not res.ok:
4144            error_msg = self._try_extract_error_code(res)
4145            logger.error(error_msg)
4146            raise BoostedAPIException(f"Failed to get user portfolio groups: {error_msg}")
4147
4148        data = res.json()
4149        return data

4151    def get_portfolio_group(self, portfolio_group_id: str) -> Dict:
4152        """
4153        Parameters:
4154        portfolio_group_id: str
4155           UUID identifier for the portfolio group
4156
4157
4158        Returns:
4159        ----------
4160
4161        PortfolioGroup: Dict:  {
4162        group_id: str
4163        group_name: str
4164        portfolios: List[PortfolioInGroup]
4165        }
4166        where PortfolioInGroup is defined as Dict {
4167        portfolio_id: str
4168        portfolio_name: str
4169        rank_in_group: Optional[int]
4170        }
4171        """
4172        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-one"
4173        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4174        req_json = {"portfolio_group_id": portfolio_group_id}
4175        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4176
4177        if not res.ok:
4178            error_msg = self._try_extract_error_code(res)
4179            logger.error(error_msg)
4180            raise BoostedAPIException(f"Failed to get user portfolio groups: {error_msg}")
4181
4182        data = res.json()
4183        return data

4185    def set_sticky_portfolio_group(
4186        self,
4187        portfolio_group_id: str,
4188    ) -> Dict:
4189        """
4190        Set sticky portfolio group
4191
4192        Parameters
4193        ----------
4194
4195        group_id: str,
4196           UUID str identifying a portfolio group
4197
4198        Returns:
4199        -------
4200        Dict {
4201            changed: int - 1 == success
4202        }
4203        """
4204        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/set-sticky"
4205        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4206        req_json = {"portfolio_group_id": portfolio_group_id}
4207        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4208
4209        if not res.ok:
4210            error_msg = self._try_extract_error_code(res)
4211            logger.error(error_msg)
4212            raise BoostedAPIException(f"Failed to set sticky portfolio group: {error_msg}")
4213
4214        data = res.json()
4215        return data

4217    def get_sticky_portfolio_group(
4218        self,
4219    ) -> Dict:
4220        """
4221        Get sticky portfolio group for the user
4222
4223        Parameters
4224        ----------
4225
4226        Returns:
4227        -------
4228        Dict {
4229            group_id: str
4230            group_name: str
4231            portfolios: List[PortfolioInGroup(Dict)]
4232                  PortfolioInGroup(Dict):
4233                           portfolio_id: str
4234                           rank_in_group: Optional[int] = None
4235                           portfolio_name: Optional[str] = None
4236        }
4237        """
4238        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/get-sticky"
4239        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4240        req_json: Dict = {}
4241        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4242
4243        if not res.ok:
4244            error_msg = self._try_extract_error_code(res)
4245            logger.error(error_msg)
4246            raise BoostedAPIException(f"Failed to get sticky portfolio group: {error_msg}")
4247
4248        data = res.json()
4249        return data

4251    def create_portfolio_group(
4252        self,
4253        group_name: str,
4254        portfolios: Optional[List[Dict]] = None,
4255    ) -> Dict:
4256        """
4257        Create a new portfolio group
4258
4259        Parameters
4260        ----------
4261
4262        group_name: str
4263           name of the new group
4264
4265        portfolios: List of Dict [
4266
4267        portfolio_id: str
4268        rank_in_group: Optional[int] = None
4269        ]
4270
4271        Returns:
4272        ----------
4273
4274        Dict: {
4275        group_id: str
4276           UUID identifier for the portfolio group
4277
4278        created: int
4279           num groups created, 1 == success
4280
4281        added: int
4282           num portfolios added to the group, should match the length of 'portfolios' argument
4283        }
4284        """
4285        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/create"
4286        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4287        req_json = {"group_name": group_name, "portfolios": portfolios}
4288
4289        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4290
4291        if not res.ok:
4292            error_msg = self._try_extract_error_code(res)
4293            logger.error(error_msg)
4294            raise BoostedAPIException(f"Failed to create portfolio group: {error_msg}")
4295
4296        data = res.json()
4297        return data
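
A sketch (ids hypothetical); per the docstring, rank_in_group is optional for each entry:

    resp = client.create_portfolio_group(
        group_name="My Group",
        portfolios=[
            {"portfolio_id": "PORTFOLIO_UUID_1", "rank_in_group": 1},  # hypothetical ids
            {"portfolio_id": "PORTFOLIO_UUID_2"},
        ],
    )
    group_id = resp["group_id"]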

4299    def rename_portfolio_group(
4300        self,
4301        group_id: str,
4302        group_name: str,
4303    ) -> Dict:
4304        """
4305        Rename a portfolio group
4306
4307        Parameters
4308        ----------
4309
4310        group_id: str,
4311           UUID str identifying a portfolio group
4312
4313        group_name: str,
4314           The new name for the portfolio
4315
4316        Returns:
4317        -------
4318        Dict {
4319            changed: int - 1 == success
4320        }
4321        """
4322        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/rename"
4323        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4324        req_json = {"group_id": group_id, "group_name": group_name}
4325        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4326
4327        if not res.ok:
4328            error_msg = self._try_extract_error_code(res)
4329            logger.error(error_msg)
4330            raise BoostedAPIException(f"Failed to rename portfolio group: {error_msg}")
4331
4332        data = res.json()
4333        return data

4335    def add_to_portfolio_group(
4336        self,
4337        group_id: str,
4338        portfolios: List[Dict],
4339    ) -> Dict:
4340        """
4341        Add portfolios to a group
4342
4343        Parameters
4344        ----------
4345
4346        group_id: str,
4347           UUID str identifying a portfolio group
4348
4349        portfolios: List of Dict [
4350            portfolio_id: str
4351            rank_in_group: Optional[int] = None
4352        ]
4353
4354
4355        Returns:
4356        -------
4357        Dict {
4358            added: int
4359               number of successful changes
4360        }
4361        """
4362        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/add-to-group"
4363        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4364        req_json = {"group_id": group_id, "portfolios": portfolios}
4365
4366        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4367
4368        if not res.ok:
4369            error_msg = self._try_extract_error_code(res)
4370            logger.error(error_msg)
4371            raise BoostedAPIException(f"Failed to add portfolios to portfolio group: {error_msg}")
4372
4373        data = res.json()
4374        return data

4376    def remove_from_portfolio_group(
4377        self,
4378        group_id: str,
4379        portfolios: List[str],
4380    ) -> Dict:
4381        """
4382        Remove portfolios from a group
4383
4384        Parameters
4385        ----------
4386
4387        group_id: str,
4388           UUID str identifying a portfolio group
4389
4390        portfolios: List of str
4391
4392
4393        Returns:
4394        -------
4395        Dict {
4396            removed: int
4397               number of successful changes
4398        }
4399        """
4400        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove-from-group"
4401        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4402        req_json = {"group_id": group_id, "portfolios": portfolios}
4403        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4404
4405        if not res.ok:
4406            error_msg = self._try_extract_error_code(res)
4407            logger.error(error_msg)
4408            raise BoostedAPIException(
4409                f"Failed to remove portfolios from portfolio group: {error_msg}"
4410            )
4411
4412        data = res.json()
4413        return data

4415    def delete_portfolio_group(
4416        self,
4417        group_id: str,
4418    ) -> Dict:
4419        """
4420        Delete a portfolio group
4421
4422        Parameters
4423        ----------
4424
4425        group_id: str,
4426           UUID str identifying a portfolio group
4427
4428
4429        Returns:
4430        -------
4431        Dict {
4432            removed_groups: int
4433               number of successful changes
4434
4435            removed_portfolios: int
4436               number of successful changes
4437        }
4438        """
4439        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{PORTFOLIO_GROUP_ROUTE}/remove"
4440        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4441        req_json = {"group_id": group_id}
4442        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4443
4444        if not res.ok:
4445            error_msg = self._try_extract_error_code(res)
4446            logger.error(error_msg)
4447            raise BoostedAPIException(f"Failed to delete portfolio group: {error_msg}")
4448
4449        data = res.json()
4450        return data

4452    def set_portfolio_group_for_watchlist(
4453        self,
4454        portfolio_group_id: str,
4455        watchlist_id: str,
4456    ) -> Dict:
4457        """
4458        Set portfolio group for watchlist.
4459
4460        Parameters
4461        ----------
4462
4463        portfolio_group_id: str,
4464           UUID str identifying a portfolio group
4465
4466        watchlist_id: str,
4467           UUID str identifying a watchlist
4468
4469
4470        Returns:
4471        -------
4472        Dict {
4473            success: bool
4474            errors:
4475            data: Dict
4476                changed: int
4477        }
4478        """
4479        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_WATCHLIST_ROUTE}/set-portfolio-groups/"
4480        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4481        req_json = {"portfolio_group_id": portfolio_group_id, "watchlist_id": watchlist_id}
4482        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
4483
4484        if not res.ok:
4485            error_msg = self._try_extract_error_code(res)
4486            logger.error(error_msg)
4487            raise BoostedAPIException(f"Failed to set portfolio group for watchlist: {error_msg}")
4488
4489        return res.json()

4491    def get_ranking_dates(self, model_id: str, portfolio_id: str) -> List[datetime.date]:
4492        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4493        url = self.base_uri + f"/api/analysis/ranking-dates/{model_id}/{portfolio_id}"
4494        res = requests.get(url, headers=headers, **self._request_params)
4495        self._check_ok_or_err_with_msg(res, "Failed to get ranking dates")
4496        data = res.json().get("ranking_dates", [])
4497
4498        return [parser.parse(d).date() for d in data]
def get_prior_ranking_date( self, ranking_dates: List[datetime.date], starting_date: datetime.date) -> datetime.date:
4500    def get_prior_ranking_date(
4501        self, ranking_dates: List[datetime.date], starting_date: datetime.date
4502    ) -> datetime.date:
4503        """
4504        Given a starting date and a list of ranking dates, return the most
4505        recent previous ranking date.
4506        """
4507        # order from most recent to least
4508        ranking_dates.sort(reverse=True)
4509
4510        for d in ranking_dates:
4511            if d <= starting_date:
4512                return d
4513
4514        # if we get here, the starting date is before the earliest ranking date
4515        raise BoostedAPIException(f"No rankings exist on or before {starting_date}")
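
A worked example of the lookup (dates arbitrary): given ranking dates 2023-01-02, 2023-02-01, and 2023-03-01, a starting date of 2023-02-15 resolves to 2023-02-01, the most recent ranking on or before it:

    dates = [
        datetime.date(2023, 1, 2),
        datetime.date(2023, 2, 1),
        datetime.date(2023, 3, 1),
    ]
    client.get_prior_ranking_date(dates, datetime.date(2023, 2, 15))  # -> date(2023, 2, 1)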

4532    def get_risk_groups(
4533        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4534    ) -> List[Dict[str, Any]]:
4535        # first get the group descriptors
4536        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4537
4538        # calculate the most recent prior rankings date. This is the date
4539        # we need to use to query for risk group data.
4540        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4541        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4542        date_str = ranking_date.strftime("%Y-%m-%d")
4543
4544        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4545
4546        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4547        url = self.base_uri + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-groups/{date_str}"
4548        res = requests.get(url, headers=headers, **self._request_params)
4549
4550        self._check_ok_or_err_with_msg(
4551            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4552        )
4553
4554        # Response is a list of rows, each shaped like:
4555        # [
4556        #   [0, 14, 1],
4557        #   [25, 12, 13],
4558        #   0.67013
4559        # ]
4560        #
4561        # Where each integer in the two lists is a descriptor id and the
4562        # final float is the volatility explained.
4570
4571        groups = []
4572        for i, row in enumerate(res.json()):
4573            row_map: Dict[str, Any] = {}
4574            # map descriptor id to name
4575            row_map["machine"] = i + 1  # start at 1 not 0
4576            row_map["risk_group_a"] = [descriptors[i] for i in row[0]]
4577            row_map["risk_group_b"] = [descriptors[i] for i in row[1]]
4578            row_map["volatility_explained"] = row[2]
4579            groups.append(row_map)
4580
4581        return groups
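
A usage sketch (ids hypothetical); each returned dict pairs two groups of descriptor names with the volatility they explain:

    groups = client.get_risk_groups(
        model_id="MODEL_UUID",  # hypothetical ids
        portfolio_id="PORTFOLIO_UUID",
        date=datetime.date.today(),
    )
    for g in groups:
        print(g["machine"], g["risk_group_a"], g["risk_group_b"], g["volatility_explained"])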
4583    def get_risk_factors_discovered_descriptors(
4584        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4585    ) -> pd.DataFrame:
4586        # first get the group descriptors
4587        descriptors = self._get_risk_factors_descriptors(model_id, portfolio_id, use_v2)
4588
4589        # calculate the most recent prior rankings date. This is the date
4590        # we need to use to query for risk group data.
4591        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4592        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4593        date_str = ranking_date.strftime("%Y-%m-%d")
4594
4595        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4596
4597        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4598        url = (
4599            self.base_uri
4600            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-descriptors/json/{date_str}"
4601        )
4602        res = requests.get(url, headers=headers, **self._request_params)
4603
4604        self._check_ok_or_err_with_msg(
4605            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4606        )
4607
4608        # Endpoint returns a nested list of floats
4609        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4610
4611        # This flat dataframe represents a potentially doubly nested structure
4612        # of Sector -> (high/low volatility) -> security. We don't care about
4613        # the high/low volatility rows, (which will have negative identifiers)
4614        # so we can filter these out.
4615        df = df[df["identifier"] >= 0]
4616
4617        # now, any values that had a depth of 2 should be set to a depth of 1,
4618        # since we removed the double nesting.
4619        df.replace(to_replace=2, value=1, inplace=True)
4620
4621        # This dataframe represents data that is nested on the UI, so the
4622        # "depth" field indicates which level of nesting each row is at. At this
4623        # point, a depth of 0 indicates a sector, and following depth 1 rows are
4624        # securities within the sector.
4625
4626        # Identifiers in rows with depth 1 will be gbi ids, need to convert to
4627        # symbols.
4628        gbi_ids = df[df["depth"] == 1]["identifier"].tolist()
4629        sec_info = self._get_security_info(gbi_ids)["data"]["securities"]
4630        sec_map = {s["gbiId"]: s["symbol"] for s in sec_info}
4631
4632        def convert_ids(row: pd.Series) -> pd.Series:
4633            # convert each row's "identifier" to the appropriate id type. If the
4634            # depth is 0, the identifier should be a sector, otherwise it should
4635            # be a ticker.
4636            ident = int(row["identifier"])
4637            row["identifier"] = (
4638                descriptors.get(ident).title() if row["depth"] == 0 else sec_map.get(ident)
4639            )
4640            return row
4641
4642        df["depth"] = df["depth"].astype(int)
4643        df["stock_count"] = df["stock_count"].astype(int)
4644        df = df.apply(convert_ids, axis=1)
4645        df = df.reset_index(drop=True)
4646        return df
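
As a sketch (ids hypothetical), the resulting frame uses the RISK_FACTOR_COLUMNS layout, with depth 0 rows naming sectors and depth 1 rows naming tickers within the preceding sector:

    risk_df = client.get_risk_factors_discovered_descriptors(
        model_id="MODEL_UUID",  # hypothetical ids
        portfolio_id="PORTFOLIO_UUID",
        date=datetime.date.today(),
    )
    sector_rows = risk_df[risk_df["depth"] == 0]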
4648    def get_risk_factors_sectors(
4649        self, model_id: str, portfolio_id: str, date: datetime.date, use_v2: bool = False
4650    ) -> pd.DataFrame:
4651        # first get the group descriptors
4652        sectors = {s["id"]: s["name"] for s in self._get_sector_info()}
4653
4654        # calculate the most recent prior rankings date. This is the date
4655        # we need to use to query for risk group data.
4656        ranking_dates = self.get_ranking_dates(model_id, portfolio_id)
4657        ranking_date = self.get_prior_ranking_date(ranking_dates, date)
4658        date_str = ranking_date.strftime("%Y-%m-%d")
4659
4660        risk_factor = RISK_FACTOR_V2 if use_v2 else RISK_FACTOR
4661
4662        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4663        url = (
4664            self.base_uri
4665            + f"/api/{risk_factor}/{model_id}/{portfolio_id}/risk-sectors/json/{date_str}"
4666        )
4667        res = requests.get(url, headers=headers, **self._request_params)
4668
4669        self._check_ok_or_err_with_msg(
4670            res, f"Failed to get risk factors for {model_id=}, {portfolio_id=}, {date=}"
4671        )
4672
4673        # Endpoint returns a nested list of floats
4674        df = pd.DataFrame(res.json(), columns=RISK_FACTOR_COLUMNS)
4675
4676        # identifier is a gics sector identifier
4677        df["identifier"] = df["identifier"].apply(lambda i: sectors.get(int(i), None))
4678
4679        # This dataframe represents data that is nested on the UI, so the
4680        # "depth" field indicates which level of nesting each row is at. For
4681        # risk factors sectors, each "depth" represents a level of specificity
4682        # for the sector. E.g. Energy -> Energy Equipment -> Oil & Gas Equipment
4683        df["depth"] = df["depth"].astype(int)
4684        df["stock_count"] = df["stock_count"].astype(int)
4685        df = df.reset_index(drop=True)
4686        return df
4688    def download_complete_portfolio_data(
4689        self, model_id: str, portfolio_id: str, download_filepath: str
4690    ):
4691        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4692        url = self.base_uri + f"/api/models/{model_id}/{portfolio_id}/excel"
4693
4694        res = requests.get(url, headers=headers, **self._request_params)
4695        self._check_ok_or_err_with_msg(
4696            res, f"Failed to get full data for {model_id=}, {portfolio_id=}"
4697        )
4698
4699        with open(download_filepath, "wb") as f:
4700            f.write(res.content)
4702    def diff_hedge_experiment_portfolio_data(
4703        self,
4704        hedge_experiment_id: str,
4705        comparison_portfolios: List[str],
4706        categories: List[str],
4707    ) -> Dict:
4708        qry = """
4709        query diffHedgeExperimentPortfolios(
4710            $input: DiffHedgeExperimentPortfoliosInput!
4711        ) {
4712            diffHedgeExperimentPortfolios(input: $input) {
4713            data {
4714                diffs {
4715                    volatility {
4716                        date
4717                        vol5D
4718                        vol10D
4719                        vol21D
4721                        vol63D
4722                        vol126D
4723                        vol189D
4724                        vol252D
4725                        vol315D
4726                        vol378D
4727                        vol441D
4728                        vol504D
4729                    }
4730                    performance {
4731                        date
4732                        value
4733                    }
4734                    performanceGrid {
4735                        headerRow
4736                        values
4737                    }
4738                    factors {
4739                        date
4740                        momentum
4741                        growth
4742                        size
4743                        value
4744                        dividendYield
4745                        volatility
4746                    }
4747                }
4748            }
4749            errors
4750            }
4751        }
4752        """
4753        headers = {"Authorization": "ApiKey " + self.api_key}
4754        params = {
4755            "hedgeExperimentId": hedge_experiment_id,
4756            "portfolioIds": comparison_portfolios,
4757            "categories": categories,
4758        }
4759        resp = requests.post(
4760            f"{self.base_uri}/api/graphql",
4761            json={"query": qry, "variables": params},
4762            headers=headers,
4763            params=self._request_params,
4764        )
4765
4766        json_resp = resp.json()
4767
4768        # graphql endpoints typically return 200 or 400 status codes, so we must
4769        # check if we have any errors, even with a 200
4770        if (resp.ok and "errors" in json_resp) or not resp.ok:
4771            error_msg = self._try_extract_error_code(resp)
4772            logger.error(error_msg)
4773            raise BoostedAPIException(
4774                (
4775                    f"Failed to get portfolio diffs for {hedge_experiment_id=}: "
4776                    f"{resp.status_code=}; {error_msg=}"
4777                )
4778            )
4779
4780        diffs = json_resp["data"]["diffHedgeExperimentPortfolios"]["data"]["diffs"]
4781        comparisons = {}
4782        for pf, cmp in zip(comparison_portfolios, diffs):
4783            res: Dict[str, Any] = {
4784                "performance": None,
4785                "performanceGrid": None,
4786                "factors": None,
4787                "volatility": None,
4788            }
4789            if "performanceGrid" in cmp:
4790                grid = cmp["performanceGrid"]
4791                grid_df = pd.DataFrame(grid["values"], columns=grid["headerRow"])
4792                res["performanceGrid"] = grid_df
4793            if "performance" in cmp:
4794                perf_df = pd.DataFrame(cmp["performance"]).set_index("date")
4795                perf_df.index = pd.to_datetime(perf_df.index)
4796                res["performance"] = perf_df
4797            if "volatility" in cmp:
4798                vol_df = pd.DataFrame(cmp["volatility"]).set_index("date")
4799                vol_df.index = pd.to_datetime(vol_df.index)
4800                res["volatility"] = vol_df
4801            if "factors" in cmp:
4802                factors_df = pd.DataFrame(cmp["factors"]).set_index("date")
4803                factors_df.index = pd.to_datetime(factors_df.index)
4804                res["factors"] = factors_df
4805            comparisons[pf] = res
4806        return comparisons
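
A sketch of consuming the result (ids hypothetical); each comparison portfolio maps to a dict of DataFrames keyed by category, with None left for categories absent from the response:

    comparisons = client.diff_hedge_experiment_portfolio_data(
        hedge_experiment_id="EXPERIMENT_UUID",  # hypothetical ids
        comparison_portfolios=["PORTFOLIO_UUID"],
        categories=["performance", "volatility"],
    )
    perf_df = comparisons["PORTFOLIO_UUID"]["performance"]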
4808    def get_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4809        url = self.base_uri + f"/api/analysis/signal_strength/{model_id}/{portfolio_id}"
4810        headers = {"Authorization": "ApiKey " + self.api_key}
4811
4812        logger.info(f"Retrieving portfolio signals for {model_id=}, {portfolio_id=}")
4813
4814        # Response format is a json object with a "header_row" key for column
4815        # names, and then a nested list of data.
4816        resp = requests.get(url, headers=headers, **self._request_params)
4817        self._check_ok_or_err_with_msg(
4818            resp, f"Failed to get portfolio signals for {model_id=}, {portfolio_id=}"
4819        )
4820
4821        data = resp.json()
4822
4823        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4824        df["Date"] = pd.to_datetime(df["Date"])
4825        df = df.set_index("Date")
4826        return df.astype(float)
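
For example (ids hypothetical), the result is a date-indexed DataFrame of floats:

    signals = client.get_signal_strength("MODEL_UUID", "PORTFOLIO_UUID")  # hypothetical ids
    print(signals.tail())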
4828    def get_rolling_signal_strength(self, model_id: str, portfolio_id: str) -> pd.DataFrame:
4829        url = self.base_uri + f"/api/analysis/signal_strength_rolling/{model_id}/{portfolio_id}"
4830        headers = {"Authorization": "ApiKey " + self.api_key}
4831
4832        logger.info(f"Retrieving rolling portfolio signals for {model_id=}, {portfolio_id=}")
4833
4834        # Response format is a json object with a "header_row" key for column
4835        # names, and then a nested list of data.
4836        resp = requests.get(url, headers=headers, **self._request_params)
4837        self._check_ok_or_err_with_msg(
4838            resp, f"Failed to get rolling portfolio signals for {model_id=}, {portfolio_id=}"
4839        )
4840
4841        data = resp.json()
4842
4843        df = pd.DataFrame(data=data["data"], columns=data["header_row"])
4844        df["Date"] = pd.to_datetime(df["Date"])
4845        df = df.set_index("Date")
4846        return df.astype(float)
4848    def get_portfolio_quantiles(
4849        self,
4850        model_id: str,
4851        portfolio_id: str,
4852        id_type: Literal["TICKER", "ISIN"] = "TICKER",
4853    ):
4854        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
4855        date = datetime.date.today().strftime("%Y-%m-%d")
4856
4857        payload = {
4858            "model_id": model_id,
4859            "portfolio_id": portfolio_id,
4860            "fields": ["quantile"],
4861            "min_date": date,
4862            "max_date": date,
4863            "return_format": "json",
4864        }
4865        # TODO: Later change this URI to not use the watchlist prefix. It is misnamed.
4866        url = f"{self.base_uri}{WATCHLIST_ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
4867
4868        res: requests.Response = requests.post(
4869            url, json=payload, headers=headers, **self._request_params
4870        )
4871        self._check_ok_or_err_with_msg(res, "Unable to get quantile data")
4872
4873        resp: Dict = res.json()
4874        quantile_index = resp["field_map"]["Quantile"]
4875        quantile_data = [[c[quantile_index] for c in r] for r in resp["data"]]
4876        date_cols = pd.to_datetime(resp["columns"])
4877
4878        # Need to map gbi id's to isins or tickers
4879        gbi_ids = [int(i) for i in resp["rows"]]
4880        security_info = self._get_security_info(gbi_ids)
4881
4882        # We now have security data, go through and create a map from internal
4883        # gbi id to client facing identifier
4884        id_key = "isin" if id_type == "ISIN" else "symbol"
4885        gbi_identifier_map = {
4886            sec["gbiId"]: sec[id_key] for sec in security_info["data"]["securities"]
4887        }
4888
4889        df = pd.DataFrame(quantile_data, index=gbi_ids, columns=date_cols).transpose()
4890        df = df.rename(columns=gbi_identifier_map)
4891        return df
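
A sketch (ids hypothetical); columns are tickers by default, or ISINs when id_type="ISIN", and rows are indexed by date:

    quantiles = client.get_portfolio_quantiles(
        model_id="MODEL_UUID",  # hypothetical ids
        portfolio_id="PORTFOLIO_UUID",
        id_type="TICKER",
    )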
4893    def get_similar_stocks(
4894        self,
4895        model_id: str,
4896        portfolio_id: str,
4897        symbol_list: List[str],
4898        date: BoostedDate,
4899        identifier_type: Literal["TICKER", "ISIN"],
4900        preferred_country: Optional[str] = None,
4901        preferred_currency: Optional[str] = None,
4902    ) -> pd.DataFrame:
4903        date_str = convert_date(date).strftime("%Y-%m-%d")
4904
4905        sec_data = self.getGbiIdFromIdentCountryCurrencyDate(
4906            ident_country_currency_dates=[
4907                DateIdentCountryCurrency(
4908                    date=datetime.date.today().isoformat(),
4909                    identifier=s,
4910                    id_type=(
4911                        ColumnSubRole.SYMBOL if identifier_type == "TICKER" else ColumnSubRole.ISIN
4912                    ),
4913                    country=preferred_country,
4914                    currency=preferred_currency,
4915                )
4916                for s in symbol_list
4917            ]
4918        )
4919
4920        gbi_id_ident_map: Dict[int, str] = {}
4921        for sec in sec_data:
4922            ident = sec.ticker if identifier_type == "TICKER" else sec.isin_info.identifier
4923            gbi_id_ident_map[sec.gbi_id] = ident
4924        gbi_ids = list(gbi_id_ident_map.keys())
4925
4926        qry = """
4927          query GetSimilarStocks(
4928            $modelId: ID!
4929            $portfolioId: ID!
4930            $gbiIds: [Int]!
4931            $startDate: String!
4932            $endDate: String!
4933            $includeCorrelation: Boolean
4934          ) {
4935            similarStocks(
4936              modelId: $modelId,
4937              portfolioId: $portfolioId,
4938              gbiIds: $gbiIds,
4939              startDate: $startDate,
4940              endDate: $endDate,
4941              includeCorrelation: $includeCorrelation
4942            ) {
4943              gbiId
4944              overallSimilarityScore
4945              priceSimilarityScore
4946              factorSimilarityScore
4947              correlation
4948            }
4949          }
4950        """
4951        variables = {
4952            "startDate": date_str,
4953            "endDate": date_str,
4954            "modelId": model_id,
4955            "portfolioId": portfolio_id,
4956            "gbiIds": gbi_ids,
4957            "includeCorrelation": True,
4958        }
4959
4960        resp = self._get_graphql(
4961            qry, variables=variables, error_msg_prefix="Failed to get similar stocks result: "
4962        )
4963        df = pd.DataFrame(resp["data"]["similarStocks"])
4964
4965        # Now that we have the rest of the securities in the portfolio, we need
4966        # to map them back to the correct identifiers
4967        all_gbi_ids = df["gbiId"].tolist()
4968        sec_info = self._get_security_info(all_gbi_ids)
4969        for s in sec_info["data"]["securities"]:
4970            ident = s["symbol"] if identifier_type == "TICKER" else s["isin"]
4971            gbi_id_ident_map[s["gbiId"]] = ident
4972        df["identifier"] = df["gbiId"].map(gbi_id_ident_map)
4973        df = df.set_index("identifier")
4974        return df.drop("gbiId", axis=1)
4976    def get_portfolio_trades(
4977        self,
4978        model_id: str,
4979        portfolio_id: str,
4980        start_date: Optional[BoostedDate] = None,
4981        end_date: Optional[BoostedDate] = None,
4982    ) -> pd.DataFrame:
4983        if not end_date:
4984            end_date = datetime.date.today()
4985        end_date = convert_date(end_date)
4986
4987        if not start_date:
4988            # default to a year of data
4989            start_date = end_date - datetime.timedelta(days=365)
4990        start_date = convert_date(start_date)
4991
4992        start_date_str = start_date.strftime("%Y-%m-%d")
4993        end_date_str = end_date.strftime("%Y-%m-%d")
4994
4995        if end_date - start_date > datetime.timedelta(days=365 * 7):
4996            raise BoostedAPIException(
4997                f"Date range ({start_date_str}, {end_date_str}) too large, max 7 years"
4998            )
4999
5000        url = f"{self.base_uri}{ROUTE_PREFIX}{DAL_PA_ROUTE}/get-data/"
5001        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5002        payload = {
5003            "model_id": model_id,
5004            "portfolio_id": portfolio_id,
5005            "fields": ["price", "shares_traded", "shares_owned"],
5006            "min_date": start_date_str,
5007            "max_date": end_date_str,
5008            "return_format": "json",
5009        }
5010
5011        res: requests.Response = requests.post(
5012            url, json=payload, headers=headers, **self._request_params
5013        )
5014        self._check_ok_or_err_with_msg(res, "Unable to get portfolio trades data")
5015
5016        data = res.json()
5017        gbi_ids = [int(ident) for ident in data["rows"]]
5018
5019        # need both isin and ticker to distinguish between possible duplicates;
5020        # fetch the security info once and build both maps from the same response
5021        securities = self._get_security_info(gbi_ids)["data"]["securities"]
5022        isin_map = {str(s["gbiId"]): s["isin"] for s in securities}
5023        ticker_map = {str(s["gbiId"]): s["symbol"] for s in securities}
5028
5029        # construct individual dataframes for each security, then join them together
5030        dfs: List[pd.DataFrame] = []
5031        full_data = data["data"]
5032        for i, gbi_id in enumerate(data["rows"]):
5033            df = pd.DataFrame(
5034                index=pd.to_datetime(data["columns"]), columns=data["fields"], data=full_data[i]
5035            )
5036            # drop rows where no shares are owned or traded
5037            df.drop(
5038                df.loc[((df["shares_owned"] == 0.0) & (df["shares_traded"] == 0.0))].index,
5039                inplace=True,
5040            )
5041            df["isin"] = isin_map[gbi_id]
5042            df["ticker"] = ticker_map[gbi_id]
5043            dfs.append(df)
5044
5045        full_df = pd.concat(dfs)
5046        full_df["date"] = full_df.index
5047        full_df.sort_index(inplace=True)
5048        full_df.reset_index(drop=True, inplace=True)
5049
5050        # reorder the columns to match the spreadsheet
5051        columns = ["isin", "ticker", "date", *data["fields"]]
5052        return full_df[columns]
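
A hedged sketch of pulling a portfolio's trade history (placeholder IDs). Omitting start_date defaults to one year before end_date, and ranges over seven years raise a BoostedAPIException:

    trades = client.get_portfolio_trades(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        start_date="2023-01-01",
        end_date="2023-12-31",
    )
    # one row per (security, date) where shares were owned or traded
    print(trades[["isin", "ticker", "date", "shares_traded", "shares_owned"]].head())
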
def get_ideas( self, model_id: str, portfolio_id: str, investment_horizon: Literal['1M', '3M', '1Y'] = '1M', delta_horizon: str = '1M'):
5054    def get_ideas(
5055        self,
5056        model_id: str,
5057        portfolio_id: str,
5058        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5059        delta_horizon: str = "1M",
5060    ):
5061        if investment_horizon not in ("1M", "3M", "1Y"):
5062            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5063
5064        if delta_horizon not in ("1W", "1M", "3M", "6M", "9M", "1Y"):
5065            raise BoostedAPIException(f"Invalid delta horizon: {delta_horizon}")
5066
5067        # First compute dates based on the delta horizon. "0D" is the latest rebalance.
5068        try:
5069            dates = self._get_portfolio_rebalance_from_periods(
5070                portfolio_id=portfolio_id, rel_periods=["0D", delta_horizon]
5071            )
5072        except Exception:
5073            raise BoostedAPIException(
5074                f"Portfolio {portfolio_id} does not exist or you do not have permission to view it."
5075            )
5076        end_date = dates[0].strftime("%Y-%m-%d")
5077        start_date = dates[1].strftime("%Y-%m-%d")
5078
5079        resp = self._get_graphql(
5080            graphql_queries.GET_IDEAS_QUERY,
5081            variables={
5082                "modelId": model_id,
5083                "portfolioId": portfolio_id,
5084                "horizon": investment_horizon,
5085                "deltaHorizon": delta_horizon,
5086                "startDate": start_date,
5087                "endDate": end_date,
5088                # Note: market data date is needed to fetch market cap.
5089                # we don't fetch that data from this endpoint so we stub
5090                # out the mandatory parameter with the end date requested
5091                "marketDataDate": end_date,
5092            },
5093            error_msg_prefix="Failed to get ideas: ",
5094        )
5095        # rows is a list of dicts like:
5096        # {
5097        #   "category": "Strong Sell",
5098        #   "dividendYield": 0.0,
5099        #   "reason": "Boosted Insights has given this stock...",
5100        #   "rating": 0.458167,
5101        #   "ratingDelta": 0.438087,
5102        #   "risk": {
5103        #     "text": "high"
5104        #   },
5105        #   "security": {
5106        #     "symbol": "BA"
5107        #   }
5108        # }
5109        try:
5110            rows = resp["data"]["recommendations"]["recommendations"]
5111            data = [
5112                {
5113                    "symbol": r["security"]["symbol"],
5114                    "recommendation": r["category"],
5115                    "rating": r["rating"],
5116                    "rating_delta": r["ratingDelta"],
5117                    "dividend_yield": r["dividendYield"],
5118                    "predicted_excess_return_1m": r["ER"]["oneMonth"],
5119                    "predicted_excess_return_3m": r["ER"]["threeMonth"],
5120                    "predicted_excess_return_1y": r["ER"]["oneYear"],
5121                    "risk": r["risk"]["text"],
5122                    "reward": r["reward"]["text"],
5123                    "reason": r["reason"],
5124                }
5125                for r in rows
5126            ]
5127            df = pd.DataFrame(data)
5128            df.set_index("symbol", inplace=True)
5129        except Exception:
5130            # Don't show old exception info to client
5131            raise BoostedAPIException(
5132                "No recommendations found, try selecting another horizon."
5133            ) from None
5134
5135        return df
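
An illustrative call (placeholder IDs); the returned frame is indexed by symbol:

    ideas = client.get_ideas(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        investment_horizon="1M",
        delta_horizon="1M",
    )
    print(ideas[["recommendation", "rating", "rating_delta", "reason"]].head())
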
def get_stock_recommendations( self, model_id: str, portfolio_id: str, symbols: Union[List[str], NoneType] = None, investment_horizon: Literal['1M', '3M', '1Y'] = '1M') -> pandas.core.frame.DataFrame:
5137    def get_stock_recommendations(
5138        self,
5139        model_id: str,
5140        portfolio_id: str,
5141        symbols: Optional[List[str]] = None,
5142        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5143    ) -> pd.DataFrame:
5144        model_stocks = self._get_model_stocks(model_id)
5145
5146        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5147        gbi_ids_to_symbols = {s.gbi_id: s.ticker for s in model_stocks}
5148
5149        variables: Dict[str, Any] = {
5150            "strategyId": portfolio_id,
5151        }
5152        if symbols:
5153            variables["gbiIds"] = [
5154                symbols_to_gbiids.get(symbol) for symbol in symbols if symbols_to_gbiids.get(symbol)
5155            ]
5156        try:
5157            recs = self._get_graphql(
5158                graphql_queries.MULTI_STOCK_RECOMMENDATION_QUERY,
5159                variables=variables,
5160                log_error=False,
5161            )["data"]["currentRecommendationsFull"]
5162        except BoostedAPIException:
5163            raise BoostedAPIException(f"Error getting recommendations for strategy {portfolio_id}")
5164
5165        data = []
5166        recommendation_key = f"recommendation{investment_horizon}"
5167        for rec in recs:
5168            # Keys to rec are:
5169            # ['ER', 'rewardCategories', 'riskCategories', 'reasons',
5170            #  'recommendation', 'rewardCategory', 'riskCategory']
5171            # need to flatten these out and add to a DF
5172            rec_data = rec[recommendation_key]
5173            reasons_dict = {r["type"]: r["text"] for r in rec_data["reasons"]}
5174            row = {
5175                "symbol": gbi_ids_to_symbols[rec["gbiId"]],
5176                "recommendation": rec_data["currentCategory"],
5177                "predicted_excess_return_1m": rec_data["ER"]["oneMonth"],
5178                "predicted_excess_return_3m": rec_data["ER"]["threeMonth"],
5179                "predicted_excess_return_1y": rec_data["ER"]["oneYear"],
5180                "risk": rec_data["risk"]["text"],
5181                "reward": rec_data["reward"]["text"],
5182                "reasons": reasons_dict,
5183            }
5184
5185            data.append(row)
5186        df = pd.DataFrame(data)
5187        df.set_index("symbol", inplace=True)
5188        return df
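
A short sketch (placeholder IDs); passing symbols restricts the query to those tickers, while omitting it leaves the gbiIds filter unset:

    recs = client.get_stock_recommendations(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        symbols=["AAPL", "MSFT"],
    )
    print(recs[["recommendation", "predicted_excess_return_1m", "risk", "reward"]])
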
def get_stock_recommendation_reasons( self, model_id: str, portfolio_id: str, investment_horizon: Literal['1M', '3M', '1Y'] = '1M', symbols: Union[List[str], NoneType] = None) -> Dict[str, Union[List[str], NoneType]]:
5193    def get_stock_recommendation_reasons(
5194        self,
5195        model_id: str,
5196        portfolio_id: str,
5197        investment_horizon: Literal["1M", "3M", "1Y"] = "1M",
5198        symbols: Optional[List[str]] = None,
5199    ) -> Dict[str, Optional[List[str]]]:
5200        if investment_horizon not in ("1M", "3M", "1Y"):
5201            raise BoostedAPIException(f"Invalid investment horizon: {investment_horizon}")
5202
5203        # "0D" is the latest rebalance - it's all we have in terms of recs
5204        dates = self._get_portfolio_rebalance_from_periods(
5205            portfolio_id=portfolio_id, rel_periods=["0D"]
5206        )
5207        date = dates[0].strftime("%Y-%m-%d")
5208
5209        model_stocks = self._get_model_stocks(model_id)
5210
5211        symbols_to_gbiids = {s.ticker: s.gbi_id for s in model_stocks}
5212        if symbols is None:  # potentially iterate through all holdings
5213            symbols = symbols_to_gbiids.keys()  # type: ignore
5214
5215        reasons: Dict[str, Optional[List[str]]] = {}
5216        for sym in symbols:
5217            # it's possible that a passed symbol was not actually a portfolio holding
5218            try:
5219                gbi_id = symbols_to_gbiids[sym]
5220            except KeyError:
5221                logger.warning(f"Symbol={sym} not found in universe on {date=}")
5222                reasons[sym] = None
5223                continue
5224
5225            try:
5226                recs = self._get_graphql(
5227                    graphql_queries.STOCK_RECOMMENDATION_QUERY,
5228                    variables={
5229                        "modelId": model_id,
5230                        "portfolioId": portfolio_id,
5231                        "horizon": investment_horizon,
5232                        "gbiId": gbi_id,
5233                        "date": date,
5234                    },
5235                    log_error=False,
5236                )
5237                reasons[sym] = [
5238                    reason["text"] for reason in recs["data"]["stockRecommendation"]["reasons"]
5239                ]
5240            except BoostedAPIException:
5241                logger.warning(f"No recommendation for: {sym}, skipping...")
5242        return reasons
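
A sketch showing the two failure modes visible in the listing: symbols outside the model universe map to None, while symbols with no recommendation are skipped entirely:

    reasons = client.get_stock_recommendation_reasons(
        model_id="<model-id>",
        portfolio_id="<portfolio-id>",
        symbols=["AAPL", "ZZZZ"],  # "ZZZZ" stands in for an unknown symbol
    )
    for symbol, texts in reasons.items():
        print(symbol, texts)
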
def get_stock_mapping_alternatives( self, isin: Union[str, NoneType] = None, symbol: Union[str, NoneType] = None, country: Union[str, NoneType] = None, currency: Union[str, NoneType] = None, asof_date: Union[datetime.date, str, NoneType] = None) -> Dict:
5244    def get_stock_mapping_alternatives(
5245        self,
5246        isin: Optional[str] = None,
5247        symbol: Optional[str] = None,
5248        country: Optional[str] = None,
5249        currency: Optional[str] = None,
5250        asof_date: Optional[BoostedDate] = None,
5251    ) -> Dict:
5252        """
5253        Return the stock mapping for the given criteria, along with
5254        suggestions for alternate matches in case the mapping is not
5255        what is wanted.
5256
5257        Parameters [one of either isin or symbol must be provided]
5258        ----------
5259        isin: Optional[str]
5260            Search by ISIN.
5261        symbol: Optional[str]
5262            Search by ticker symbol.
5263        country: Optional[str]
5264            Additionally filter by country code, e.g. None, "ANY", "p_USA", "CAN".
5265        currency: Optional[str]
5266            Additionally filter by currency code, e.g. None, "ANY", "p_USD", "CAD".
5267        asof_date: Optional[date]
5268            The date as of which to perform the search; defaults to today().
5269
5270        Note: a country/currency filter starting with "p_" indicates only
5271              a soft preference and still allows other matches.
5273
5274        Returns
5275        -------
5276        Dictionary Representing this 'MapSecurityResponse' structure:
5277
5278        class MapSecurityResponse():
5279            stock_mapping: Optional[SecurityInfo]
5280               The mapping we would perform given your inputs
5281
5282            alternatives: Optional[List[SecurityInfo]]
5283               Alternative suggestions based on your input
5284
5285            error: Optional[str]
5286
5287        class SecurityInfo():
5288            gbi_id: int
5289            isin: str
5290            symbol: Optional[str]
5291            country: str
5292            currency: str
5293            name: str
5294            from_date: date
5295            to_date: date
5296            is_primary_trading_item: bool
5297
5298        """
5299
5300        url = f"{self.base_uri}/api/stock-mapping/alternatives"
5301        headers = {"Authorization": "ApiKey " + self.api_key, "Content-Type": "application/json"}
5302        req_json: Dict = {
5303            "isin": isin,
5304            "symbol": symbol,
5305            "countryPreference": country,
5306            "currencyPreference": currency,
5307        }
5308
5309        if asof_date:
5310            req_json["date"] = convert_date(asof_date).isoformat()
5311
5312        res = requests.post(url, json=req_json, headers=headers, **self._request_params)
5313
5314        if not res.ok:
5315            error_msg = self._try_extract_error_code(res)
5316            logger.error(error_msg)
5317            raise BoostedAPIException(f"Failed to get stock mapping alternatives: {error_msg}")
5318
5319        data = res.json()
5320        return data
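
A minimal sketch; the "p_" prefix marks a soft preference, per the docstring above:

    mapping = client.get_stock_mapping_alternatives(
        symbol="AAPL",
        country="p_USA",
        currency="p_USD",
    )
    print(mapping["stock_mapping"])
    for alt in mapping.get("alternatives") or []:
        print(alt)
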

def get_pros_cons_for_stocks( self, model_id: Union[str, NoneType] = None, symbols: Union[List[str], NoneType] = None, preferred_country: Union[str, NoneType] = None, preferred_currency: Union[str, NoneType] = None) -> Dict[str, Dict[str, List]]:
5322    def get_pros_cons_for_stocks(
5323        self,
5324        model_id: Optional[str] = None,
5325        symbols: Optional[List[str]] = None,
5326        preferred_country: Optional[str] = None,
5327        preferred_currency: Optional[str] = None,
5328    ) -> Dict[str, Dict[str, List]]:
5329        if symbols:
5330            ident_objs = [
5331                DateIdentCountryCurrency(
5332                    date=datetime.date.today().strftime("%Y-%m-%d"),
5333                    identifier=symbol,
5334                    country=preferred_country,
5335                    currency=preferred_currency,
5336                    id_type=ColumnSubRole.SYMBOL,
5337                )
5338                for symbol in symbols
5339            ]
5340            sec_objs = self.getGbiIdFromIdentCountryCurrencyDate(
5341                ident_country_currency_dates=ident_objs
5342            )
5343            gbi_id_ticker_map = {sec.gbi_id: sec.ticker for sec in sec_objs if sec}
5344        elif model_id:
5345            gbi_id_ticker_map = {
5346                sec.gbi_id: sec.ticker for sec in self._get_model_stocks(model_id=model_id)
5347            }
5348        else:
5349            raise BoostedAPIException("Must provide either model_id or symbols")
5350        gbi_ids = list(gbi_id_ticker_map.keys())
5350        data = self._get_graphql(
5351            query=graphql_queries.GET_PROS_CONS_QUERY,
5352            variables={"gbiIds": gbi_ids},
5353            error_msg_prefix="Failed to get pros/cons:",
5354        )
5355        gbi_id_pros_cons_map = {
5356            row["gbiId"]: {"pros": row["pros"], "cons": row["cons"]}
5357            for row in data["data"]["bulkSecurityProsCons"]
5358        }
5359
5360        return {
5361            gbi_id_ticker_map[gbi_id]: pros_cons
5362            for gbi_id, pros_cons in gbi_id_pros_cons_map.items()
5363        }
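
A minimal sketch; either symbols or model_id must be supplied, and the result is keyed by ticker:

    pros_cons = client.get_pros_cons_for_stocks(symbols=["AAPL", "MSFT"])
    for ticker, pc in pros_cons.items():
        print(ticker, "pros:", pc["pros"], "cons:", pc["cons"])
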
def generate_theme( self, theme_name: str, stock_universes: List[boosted.api.api_type.ThemeUniverse]) -> str:
5365    def generate_theme(self, theme_name: str, stock_universes: List[ThemeUniverse]) -> str:
5366        # First get universe name and id mappings
5367        try:
5368            resp = self._get_graphql(
5369                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5370            )
5371            data = resp["data"]["getMarketTrendsUniverses"]
5372        except Exception:
5373            raise BoostedAPIException("Failed to load market trends universes mapping")
5374
5375        universe_name_to_id = {u["name"]: u["id"] for u in data}
5376        universe_ids = [universe_name_to_id[u.value] for u in stock_universes]
5377        try:
5378            resp = self._get_graphql(
5379                query=graphql_queries.GENERATE_THEME_QUERY,
5380                variables={"input": {"themeName": theme_name, "universeIds": universe_ids}},
5381            )
5382            data = resp["data"]["generateTheme"]
5383        except Exception:
5384            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5385
5386        if not data["success"]:
5387            raise BoostedAPIException(f"Failed to generate theme: {theme_name}")
5388
5389        logger.info(
5390            f"Successfully generated theme: {theme_name}. The theme ID is {data['themeId']}"
5391        )
5392        return data["themeId"]
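
A sketch assuming ThemeUniverse is an Enum whose members name the supported universes (a member is picked arbitrarily here; use a real one in practice). The theme name is hypothetical:

    from boosted.api.api_type import ThemeUniverse

    universe = next(iter(ThemeUniverse))  # placeholder: any universe member
    theme_id = client.generate_theme(
        theme_name="Supply Chain Risk",
        stock_universes=[universe],
    )
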
def get_themes_for_stock_universe( self, stock_universe: boosted.api.api_type.ThemeUniverse, start_date: Union[datetime.date, str, NoneType] = None, end_date: Union[datetime.date, str, NoneType] = None, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> List[Dict]:
5410    def get_themes_for_stock_universe(
5411        self,
5412        stock_universe: ThemeUniverse,
5413        start_date: Optional[BoostedDate] = None,
5414        end_date: Optional[BoostedDate] = None,
5415        language: Optional[Union[str, Language]] = None,
5416    ) -> List[Dict]:
5417        """Get all themes data for a particular stock universe.
5418        (start_date, end_date) are used to calculate the theme importance for
5419        ranking purposes; if None, they default to the past 30 days.
5420        Returns: a list of dictionaries of the form
5421        {
5422            themeId: str
5423            themeName: str
5424            themeImportance: float
5425            volatility: float
5426            positiveStockPerformance: float
5427            negativeStockPerformance: float
5428        }
5429        """
5430        translate = functools.partial(self.translate_text, language)
5431        # First get universe name and id mappings
5432        universe_id = self._get_stock_universe_id(stock_universe)
5433
5434        start_date_iso, end_date_iso = get_valid_iso_dates(start_date, end_date)
5435
5436        try:
5437            resp = self._get_graphql(
5438                query=graphql_queries.GET_THEMES,
5439                variables={
5440                    "type": "UNIVERSE",
5441                    "id": universe_id,
5442                    "startDate": start_date_iso,
5443                    "endDate": end_date_iso,
5444                    "deltaHorizon": "",  # not needed here
5445                },
5446            )
5447            data = resp["data"]["themes"]
5448        except Exception:
5449            raise BoostedAPIException(
5450                f"Failed to get themes for stock universe: {stock_universe.name}"
5451            )
5452
5453        for theme_data in data:
5454            theme_data["themeName"] = translate(theme_data["themeName"])
5455        return data
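
A sketch under the same ThemeUniverse assumption as above; the date window feeds the importance ranking:

    themes = client.get_themes_for_stock_universe(
        stock_universe=next(iter(ThemeUniverse)),  # placeholder member
        start_date="2023-06-01",
        end_date="2023-06-30",
    )
    for t in sorted(themes, key=lambda t: t["themeImportance"], reverse=True)[:5]:
        print(t["themeName"], t["themeImportance"])
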

def get_themes_for_stock( self, isin: str, currency: Union[str, NoneType] = None, country: Union[str, NoneType] = None, start_date: Union[datetime.date, str, NoneType] = None, end_date: Union[datetime.date, str, NoneType] = None, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> List[Dict]:
5457    def get_themes_for_stock(
5458        self,
5459        isin: str,
5460        currency: Optional[str] = None,
5461        country: Optional[str] = None,
5462        start_date: Optional[BoostedDate] = None,
5463        end_date: Optional[BoostedDate] = None,
5464        language: Optional[Union[str, Language]] = None,
5465    ) -> List[Dict]:
5466        """Get all themes data for a particular stock.
5467        (isin, currency, country) compose a unique identifier used to map
5468        the stock to a GBI ID. (start_date, end_date) are used to calculate
5469        the theme importance for ranking purposes; if None, they default to
5470        the past 30 days.
5471
5472        Returns a list of dictionaries of the form
5473        {
5474            themeId: str
5475            themeName: str
5476            importanceScore: float
5477            similarityScore: float
5478            positiveThemeRelation: bool
5479            reason: String
5480        }
5481        """
5482        translate = functools.partial(self.translate_text, language)
5483        security_info = self.get_stock_mapping_alternatives(
5484            isin, country=country, currency=currency
5485        )
5486        gbi_id = security_info["stock_mapping"]["gbi_id"]
5487
5488        if (start_date and not end_date) or (end_date and not start_date):
5489            raise BoostedAPIException("Must provide both start and end dates or neither")
5490        elif not end_date and not start_date:
5491            end_date = datetime.date.today()
5492            start_date = end_date - datetime.timedelta(days=30)
5493            end_date = end_date.isoformat()
5494            start_date = start_date.isoformat()
5495        else:
5496            if isinstance(start_date, datetime.date):
5497                start_date = start_date.isoformat()
5498            if isinstance(end_date, datetime.date):
5499                end_date = end_date.isoformat()
5500
5501        try:
5502            resp = self._get_graphql(
5503                query=graphql_queries.GET_THEMES_FOR_STOCK_WITH_REASONS,
5504                variables={"gbiId": gbi_id, "startDate": start_date, "endDate": end_date},
5505            )
5506            data = resp["data"]["themesForStockWithReasons"]
5507        except Exception:
5508            raise BoostedAPIException(f"Failed to get themes for stock: {isin}")
5509
5510        for item in data:
5511            item["themeName"] = translate(item["themeName"])
5512            item["reason"] = translate(item["reason"])
5513        return data
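
A minimal sketch; the ISIN shown is Apple's and serves only as an example. Provide both dates or neither:

    themes = client.get_themes_for_stock(isin="US0378331005")
    for t in themes:
        print(t["themeName"], t["importanceScore"], t["positiveThemeRelation"])
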

def get_stock_news( self, time_horizon: boosted.api.api_type.NewsHorizon, isin: str, currency: Union[str, NoneType] = None, country: Union[str, NoneType] = None, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> Dict:
5515    def get_stock_news(
5516        self,
5517        time_horizon: NewsHorizon,
5518        isin: str,
5519        currency: Optional[str] = None,
5520        country: Optional[str] = None,
5521        language: Optional[Union[str, Language]] = None,
5522    ) -> Dict:
5523        """
5524        Get a stock's news summary for a given time horizon, along with the
5525        topics summarized from that news and the news items behind each topic.
5526        Returns
5527        -------
5528        A nested dictionary in the following format:
5529        {
5530            summary: str
5531            topics: [
5532                {
5533                    topicId: str
5534                    topicLabel: str
5535                    topicDescription: str
5536                    topicPolarity: str
5537                    newsItems: [
5538                        {
5539                            newsId: str
5540                            headline: str
5541                            url: str
5542                            summary: str
5543                            source: str
5544                            publishedAt: str
5545                        }
5546                    ]
5547                }
5548            ]
5549            other_news_count: int
5550        }
5551        """
5552        translate = functools.partial(self.translate_text, language)
5553        security_info = self.get_stock_mapping_alternatives(
5554            isin, country=country, currency=currency
5555        )
5556        gbi_id = security_info["stock_mapping"]["gbi_id"]
5557
5558        try:
5559            resp = self._get_graphql(
5560                query=graphql_queries.GET_STOCK_NEWS_QUERY,
5561                variables={"gbiId": gbi_id, "deltaHorizon": time_horizon.value},
5562            )
5563            data = resp["data"]
5564        except Exception:
5565            raise BoostedAPIException(f"Failed to get news for stock: {isin}")
5566
5567        outputs: Dict[str, Any] = {}
5568        outputs["summary"] = translate(data["getStockNewsSummary"]["summary"])
5569        # Return the top 10 topics
5570        outputs["topics"] = data["getStockNewsTopics"]["topics"][:10]
5571
5572        for topic in outputs["topics"]:
5573            topic["topicLabel"] = translate(topic["topicLabel"])
5574            topic["topicDescription"] = translate(topic["topicDescription"])
5575
5576        other_news_count = 0
5577        for source_count in data["getStockNewsSummary"]["sourceCounts"]:
5578            other_news_count += source_count["count"]
5579
5580        for topic in outputs["topics"]:
5581            other_news_count -= len(topic["newsItems"])
5582
5583        outputs["other_news_count"] = other_news_count
5584
5585        return outputs
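
A sketch assuming NewsHorizon is an Enum of supported horizons (a member is picked arbitrarily for illustration):

    from boosted.api.api_type import NewsHorizon

    news = client.get_stock_news(
        time_horizon=next(iter(NewsHorizon)),  # placeholder horizon member
        isin="US0378331005",
    )
    print(news["summary"])
    for topic in news["topics"]:  # at most the top 10 topics
        print(topic["topicLabel"], "-", len(topic["newsItems"]), "items")
    print("other news items:", news["other_news_count"])
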

def get_theme_details( self, theme_id: str, universe: boosted.api.api_type.ThemeUniverse, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> Dict[str, Any]:
5587    def get_theme_details(
5588        self,
5589        theme_id: str,
5590        universe: ThemeUniverse,
5591        language: Optional[Union[str, Language]] = None,
5592    ) -> Dict[str, Any]:
5593        translate = functools.partial(self.translate_text, language)
5594        universe_id = self._get_stock_universe_id(universe)
5595        date = datetime.date.today()
5596        prev_date = date - datetime.timedelta(days=30)
5597        result = self._get_graphql(
5598            query=graphql_queries.GET_THEME_DEEPDIVE_DETAILS,
5599            variables={
5600                "deltaHorizon": "1W",
5601                "startDate": prev_date.strftime("%Y-%m-%d"),
5602                "endDate": date.strftime("%Y-%m-%d"),
5603                "id": universe_id,
5604                "themeId": theme_id,
5605                "type": "UNIVERSE",
5606            },
5607            error_msg_prefix="Failed to get theme details",
5608        )["data"]["marketThemes"]
5609
5610        gbi_id_stock_data_map: Dict[int, Dict] = {}
5611
5612        stocks = []
5613        for stock_info in result["stockInfos"]:
5614            gbi_id_stock_data_map[stock_info["gbiId"]] = stock_info["security"]
5615            stocks.append(
5616                {
5617                    "isin": stock_info["security"]["isin"],
5618                    "name": stock_info["security"]["name"],
5619                    "reason": translate(stock_info["polarityReasonScores"]["reason"]),
5620                    "positive_theme_relation": stock_info["polarityReasonScores"][
5621                        "positiveThemeRelation"
5622                    ],
5623                    "theme_stock_impact_score": stock_info["polarityReasonScores"][
5624                        "similarityScore"
5625                    ],
5626                }
5627            )
5628
5629        impacts = []
5630        for impact in result["impactInfos"]:
5631            articles = [
5632                {
5633                    "title": newsitem["headline"],
5634                    "url": newsitem["url"],
5635                    "source": newsitem["source"],
5636                    "publish_date": newsitem["publishedAt"],
5637                }
5638                for newsitem in impact["newsItems"]
5639            ]
5640
5641            impact_stocks = []
5642            for impact_stock_data in impact["stocks"]:
5643                stock_metadata = gbi_id_stock_data_map[impact_stock_data["gbiId"]]
5644                impact_stocks.append(
5645                    {
5646                        "isin": stock_metadata["isin"],
5647                        "name": stock_metadata["name"],
5648                        "positive_impact_relation": impact_stock_data["positiveThemeRelation"],
5649                    }
5650                )
5651
5652            impact_dict = {
5653                "impact_name": translate(impact["impactName"]),
5654                "impact_description": translate(impact["impactDescription"]),
5655                "impact_score": impact["impactScore"],
5656                "articles": articles,
5657                "impact_stocks": impact_stocks,
5658            }
5659            impacts.append(impact_dict)
5660
5661        developments = []
5662        for dev in result["themeDevelopments"]:
5663            developments.append(
5664                {
5665                    "name": dev["label"],
5666                    "article_count": dev["articleCount"],
5667                    "date": parser.parse(dev["date"]).date(),
5668                    "description": dev["description"],
5669                    "is_major_development": dev["isMajorDevelopment"],
5670                    "sentiment": dev["sentiment"],
5671                    "news": [
5672                        {
5673                            "headline": entry["headline"],
5674                            "published_at": parser.parse(entry["publishedAt"]),
5675                            "source": entry["source"],
5676                            "url": entry["url"],
5677                        }
5678                        for entry in dev["news"]
5679                    ],
5680                }
5681            )
5682
5683        developments = sorted(developments, key=lambda d: d["date"], reverse=True)
5684
5685        output = {
5686            "theme_name": translate(result["themeName"]),
5687            "theme_summary": translate(result["themeDescription"]),
5688            "impacts": impacts,
5689            "stocks": stocks,
5690            "developments": developments,
5691        }
5692        return output
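
A sketch under the ThemeUniverse assumption used earlier; per the listing, developments come back sorted newest first:

    details = client.get_theme_details(
        theme_id="<theme-id>",
        universe=next(iter(ThemeUniverse)),  # placeholder member
    )
    print(details["theme_name"])
    for dev in details["developments"]:
        print(dev["date"], dev["name"], dev["sentiment"])
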
def get_all_theme_metadata( self, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> List[Dict[str, Any]]:
5694    def get_all_theme_metadata(
5695        self, language: Optional[Union[str, Language]] = None
5696    ) -> List[Dict[str, Any]]:
5697        translate = functools.partial(self.translate_text, language)
5698        result = self._get_graphql(
5699            graphql_queries.GET_ALL_THEMES,
5700            variables={"universeIds": None},
5701            error_msg_prefix="Failed to fetch all themes metadata",
5702        )
5703
5704        try:
5705            resp = self._get_graphql(
5706                query=graphql_queries.GET_MARKET_TRENDS_UNIVERSES_QUERY, variables={}
5707            )
5708            data = resp["data"]["getMarketTrendsUniverses"]
5709        except Exception:
5710            raise BoostedAPIException("Failed to load market trends universes mapping")
5711        universe_id_to_name = {u["id"]: u["name"] for u in data}
5712
5713        outputs = []
5714        for theme in result["data"]["getAllThemesForUser"]:
5715            # map universe ID to universe ticker
5716            universe_tickers = []
5717            for universe_id in theme["universeIds"]:
5718                if universe_id in universe_id_to_name:  # don't support unlisted universes - skip
5719                    universe_name = universe_id_to_name[universe_id]
5720                    ticker = ThemeUniverse.get_ticker_from_name(universe_name)
5721                    if ticker:
5722                        universe_tickers.append(ticker)
5723
5724            outputs.append(
5725                {
5726                    "theme_id": theme["themeId"],
5727                    "theme_name": translate(theme["themeName"]),
5728                    "universes": universe_tickers,
5729                }
5730            )
5731
5732        return outputs
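
A minimal sketch; universes come back as tickers, and themes in unlisted universes are simply skipped:

    for meta in client.get_all_theme_metadata():
        print(meta["theme_id"], meta["theme_name"], meta["universes"])
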
def get_earnings_impacting_security( self, isin: str, currency: Union[str, NoneType] = None, country: Union[str, NoneType] = None, language: Union[boosted.api.api_type.Language, str, NoneType] = None) -> List[Dict[str, Any]]:
5734    def get_earnings_impacting_security(
5735        self,
5736        isin: str,
5737        currency: Optional[str] = None,
5738        country: Optional[str] = None,
5739        language: Optional[Union[str, Language]] = None,
5740    ) -> List[Dict[str, Any]]:
5741        translate = functools.partial(self.translate_text, language)
5742        date = datetime.date.today().strftime("%Y-%m-%d")
5743        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5744            ident_country_currency_dates=[
5745                DateIdentCountryCurrency(
5746                    date=date, identifier=isin, country=country, currency=currency
5747                )
5748            ]
5749        )
5750        try:
5751            gbi_id = company_data[0].gbi_id
5752        except Exception:
5753            raise BoostedAPIException(f"ISIN {isin} not found")
5754
5755        result = self._get_graphql(
5756            graphql_queries.EARNINGS_IMPACTS_CALENDAR_FOR_STOCK,
5757            variables={"date": date, "days": 180, "gbiId": gbi_id},
5758            error_msg_prefix="Failed to fetch earnings impacts data for stock",
5759        )
5760        earnings_events = result["data"]["earningsCalendarForStock"]
5761        output_events = []
5762        for event in earnings_events:
5763            if not event["impactedCompanies"]:
5764                continue
5765            fixed_event = {
5766                "event_date": event["eventDate"],
5767                "company_name": event["security"]["name"],
5768                "symbol": event["security"]["symbol"],
5769                "isin": event["security"]["isin"],
5770                "impact_reason": translate(event["impactedCompanies"][0]["reason"]),
5771            }
5772            output_events.append(fixed_event)
5773
5774        return output_events
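
A minimal sketch (Apple's ISIN as an example); the listing queries a 180-day calendar window starting today:

    events = client.get_earnings_impacting_security(isin="US0378331005")
    for event in events:
        print(event["event_date"], event["symbol"], event["impact_reason"])
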
def get_earnings_insights_for_stocks( self, isin: str, currency: Union[str, NoneType] = None, country: Union[str, NoneType] = None) -> Dict[str, Any]:
5776    def get_earnings_insights_for_stocks(
5777        self, isin: str, currency: Optional[str] = None, country: Optional[str] = None
5778    ) -> Dict[str, Any]:
5779        date = datetime.date.today().strftime("%Y-%m-%d")
5780        company_data = self.getGbiIdFromIdentCountryCurrencyDate(
5781            ident_country_currency_dates=[
5782                DateIdentCountryCurrency(
5783                    date=date, identifier=isin, country=country, currency=currency
5784                )
5785            ]
5786        )
5787        gbi_id_isin_map = {
5788            company.gbi_id: company.isin_info.identifier
5789            for company in company_data
5790            if company is not None
5791        }
5792        try:
5793            resp = self._get_graphql(
5794                query=graphql_queries.GET_EARNINGS_INSIGHTS_SUMMARIES,
5795                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5796            )
5797            # list of objects with gbi id and data
5798            summaries = resp["data"]["getEarningsSummaries"]
5799            resp = self._get_graphql(
5800                query=graphql_queries.GET_EARNINGS_COMPARISONS,
5801                variables={"gbiIds": list(gbi_id_isin_map.keys())},
5802            )
5803            # list of objects with gbi id and data
5804            comparison = resp["data"]["getLatestEarningsChanges"]
5805        except Exception:
5806            raise BoostedAPIException("Failed to get earnings insights data")
5807
5808        if not summaries:
5809            raise BoostedAPIException(
5810                (
5811                    f"Failed to find earnings insights data for {isin}"
5812                    ", please try with another security"
5813                )
5814            )
5815
5816        output: Dict[str, Any] = {}
5817        reports = sorted(summaries[0]["reports"], key=lambda r: r["date"], reverse=True)
5818        current_report = reports[0]
5819
5820        def is_aligned_formatter(acc: Tuple[List, List], cur: Dict[str, Any]):
5821            if cur["isAligned"]:
5822                acc[0].append({k: cur[k] for k in cur if k != "isAligned"})
5823            else:
5824                acc[1].append({k: cur[k] for k in cur if k != "isAligned"})
5825            return acc
5826
5827        current_report_common_remarks: Union[List[Dict[str, Any]], List]
5828        current_report_dropped_remarks: Union[List[Dict[str, Any]], List]
5829        current_report_common_remarks, current_report_dropped_remarks = functools.reduce(
5830            is_aligned_formatter, current_report["details"], ([], [])
5831        )
5832        prev_report_common_remarks: Union[List[Dict[str, Any]], List]
5833        prev_report_new_remarks: Union[List[Dict[str, Any]], List]
5834        prev_report_common_remarks, prev_report_new_remarks = functools.reduce(
5835            is_aligned_formatter, reports[1]["details"], ([], [])
5836        )
5837
5838        output["earnings_report"] = {
5839            "release_date": datetime.datetime.strptime(current_report["date"], "%Y-%m-%d").date(),
5840            "quarter": current_report["quarter"],
5841            "year": current_report["year"],
5842            "details": [
5843                {
5844                    "header": detail_obj["header"],
5845                    "detail": detail_obj["detail"],
5846                    "sentiment": detail_obj["sentiment"],
5847                }
5848                for detail_obj in current_report["details"]
5849            ],
5850            "call_summary": current_report["highlights"],
5851            "common_remarks": current_report_common_remarks,
5852            "dropped_remarks": current_report_dropped_remarks,
5853            "qa_summary": current_report["qaHighlights"],
5854            "qa_details": current_report["qaDetails"],
5855        }
5856        prev_report = reports[1]
5857        output["prior_earnings_report"] = {
5858            "release_date": datetime.datetime.strptime(prev_report["date"], "%Y-%m-%d").date(),
5859            "quarter": prev_report["quarter"],
5860            "year": prev_report["year"],
5861            "details": [
5862                {
5863                    "header": detail_obj["header"],
5864                    "detail": detail_obj["detail"],
5865                    "sentiment": detail_obj["sentiment"],
5866                }
5867                for detail_obj in prev_report["details"]
5868            ],
5869            "call_summary": prev_report["highlights"],
5870            "common_remarks": prev_report_common_remarks,
5871            "new_remarks": prev_report_new_remarks,
5872            "qa_summary": prev_report["qaHighlights"],
5873            "qa_details": prev_report["qaDetails"],
5874        }
5875
5876        if not comparison:
5877            output["report_comparison"] = []
5878        else:
5879            output["report_comparison"] = comparison[0]["changes"]
5880
5881        return output
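
A minimal sketch (Apple's ISIN as an example); the result splits into the latest report, the prior one, and a report-to-report comparison:

    insights = client.get_earnings_insights_for_stocks(isin="US0378331005")
    report = insights["earnings_report"]
    print(report["release_date"], report["quarter"], report["year"])
    print(report["call_summary"])
    print(insights["report_comparison"])
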
def get_portfolio_inference_status(self, portfolio_id: str, inference_date: str) -> dict:
5883    def get_portfolio_inference_status(self, portfolio_id: str, inference_date: str) -> dict:
5884        url = f"{self.base_uri}/api/inference/status/{portfolio_id}/{inference_date}"
5885        headers = {"Authorization": "ApiKey " + self.api_key}
5886        res = requests.get(url, headers=headers, **self._request_params)
5887
5888        if not res.ok:
5889            error_msg = self._try_extract_error_code(res)
5890            logger.error(error_msg)
5891            raise BoostedAPIException(
5892                f"Failed to get portfolio inference status, portfolio_id={portfolio_id}, "
5893                f"inference_date={inference_date}: {error_msg}"
5894            )
5895
5896        data = res.json()
5897        return data
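
A minimal sketch; the date format here is an assumption based on the other date strings in this module, since the method passes it through unvalidated:

    status = client.get_portfolio_inference_status(
        portfolio_id="<portfolio-id>",
        inference_date="2023-06-30",
    )
    print(status)
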
def delete_portfolios(self, model_to_portfolios: Dict[str, List[str]]) -> None:
5899    def delete_portfolios(self, model_to_portfolios: Dict[str, List[str]]) -> None:
5900        """
5901        Deletes a list of portfolios
5902
5903        Args:
5904            model_to_portfolios: Mapping from model_id -> list of corresponding portfolios to delete
5905        """
5906        for model_id, portfolios in model_to_portfolios.items():
5907            for portfolio_id in portfolios:
5908                url = self.base_uri + "/api/models/{0}/constraints/{1}/delete".format(
5909                    model_id, portfolio_id
5910                )
5911                headers = {"Authorization": "ApiKey " + self.api_key}
5912                res = requests.put(url, headers=headers, **self._request_params)
5913                if not res.ok:
5914                    error_msg = self._try_extract_error_code(res)
5915                    logger.error(error_msg)
5916                    raise BoostedAPIException("Failed to delete portfolios: {0}.".format(error_msg))
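
A minimal sketch; per the listing, deletion is issued as one PUT request per portfolio and raises on the first failure:

    client.delete_portfolios(
        {"<model-id>": ["<portfolio-id-1>", "<portfolio-id-2>"]}
    )
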
