Module boosted.api.api_client

Classes

class BoostedAPIException (value, data=None)

Exception raised for errors encountered while communicating with the Boosted API.

Ancestors

  • builtins.Exception
  • builtins.BaseException
class BoostedClient (api_key, override_uri=None, debug=False, proxy=None, disable_verify_ssl=False)

Parameters

api_key : str
Your API key provided by the Boosted application. See your profile to generate a new key.
proxy : str
Your organization may require the use of a proxy for access. The address of an HTTPS proxy in the format address:port, e.g. "123.456.789:123" or "my.proxy.com:123". Do not prepend "https://".
disable_verify_ssl : bool
Your networking setup may be behind a firewall which performs SSL inspection. Either set the REQUESTS_CA_BUNDLE environment variable to point to the location of a custom certificate bundle, or set this parameter to True to disable SSL verification as a workaround.
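
A minimal sketch of constructing a client. The key and proxy address are placeholders; the import path follows the module name above:

    from boosted.api.api_client import BoostedClient

    # Plain connection; generate a real key from your Boosted profile.
    client = BoostedClient("YOUR_API_KEY")

    # Behind a corporate proxy that performs SSL inspection. Note: no
    # "https://" prefix on the proxy address.
    client = BoostedClient(
        "YOUR_API_KEY",
        proxy="my.proxy.com:123",
        disable_verify_ssl=True,  # or set REQUESTS_CA_BUNDLE instead
    )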

Methods

def abort_chunked_upload(self, dataset_id, chunk_id)
def addSignalsToUploadedModel(self, model_id, csv_data, timeout=600)
def addToUploadedModel(self, model_id, csv_data, timeout=600)
def add_dependent_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=HISTORICAL, no_exception_on_chunk_error=False)
def add_dependent_dataset(self, dataset, datasetName='DependentDataset', schema=None, timeout=600, block=True)
def add_dependent_dataset_with_warnings(self, dataset, datasetName='DependentDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False)
def add_global_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=HISTORICAL, no_exception_on_chunk_error=False)
def add_global_dataset(self, dataset, datasetName='GlobalDataset', schema=None, timeout=600, block=True)
def add_global_dataset_with_warnings(self, dataset, datasetName='GlobalDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False)
def add_independent_data(self, dataset_id, csv_data, timeout=600, block=True, data_type=HISTORICAL, no_exception_on_chunk_error=False)
def add_independent_dataset(self, dataset, datasetName='IndependentDataset', schema=None, timeout=600, block=True)
def add_independent_dataset_with_warnings(self, dataset, datasetName='IndependentDataset', schema=None, timeout=600, block=True, no_exception_on_chunk_error=False)
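
A sketch of creating an independent dataset and appending rows to it. The assumptions are mine: that `dataset` accepts a pandas DataFrame (validate_dataframe below suggests DataFrames are the working currency), that `csv_data` is CSV text, and that add_independent_dataset returns the new dataset's id:

    import pandas as pd

    df = pd.read_csv("my_factors.csv")  # hypothetical input file

    # Assumed: returns the id of the newly created dataset.
    dataset_id = client.add_independent_dataset(df, datasetName="MyFactors")

    # Append more rows later; data_type defaults to HISTORICAL.
    with open("more_rows.csv") as f:
        client.add_independent_data(dataset_id, f.read(), block=True)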
def add_uploaded_model_data(self, url, csv_data, request_data, timeout=600)
def check_dataset_ingestion_completion(self, dataset_id, chunk_id, start_time)
def chunk_and_upload_data(self, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False)
def commit_chunked_upload(self, dataset_id, chunk_id, data_type, block=True)
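
The chunked-upload methods compose into a start / upload / commit cycle, with abort_chunked_upload as the error path. A hedged sketch; the return value of start_chunked_upload, the form of the CSV fragments, and HISTORICAL being in scope are all assumptions:

    chunk_id = client.start_chunked_upload(dataset_id)  # assumed: returns a chunk id
    try:
        for part in csv_parts:  # hypothetical iterable of CSV fragments
            client.chunk_and_upload_data(dataset_id, chunk_id, part)
        client.commit_chunked_upload(dataset_id, chunk_id, data_type=HISTORICAL, block=True)
    except BoostedAPIException:
        client.abort_chunked_upload(dataset_id, chunk_id)
        raise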
def createBlacklist(self, isin, long_short=2, start_date=datetime.date(2022, 4, 28), end_date='4000-01-01', model_id=None)
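
The start_date default above appears to be a snapshot of the documentation build date, so pass an explicit date. A sketch of a blacklist's lifecycle, assuming createBlacklist returns the new blacklist's id (the signature alone does not say):

    import datetime

    blacklist_id = client.createBlacklist(
        isin="US0378331005",               # example ISIN
        long_short=2,                      # documented default
        start_date=datetime.date.today(),
        end_date="4000-01-01",             # documented default: effectively open-ended
    )
    client.updateBlacklist(blacklist_id, end_date="2030-01-01")
    client.deleteBlacklist(blacklist_id)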
def createDataset(self, schema)
def createPortfolioWithPortfolioSettings(self, model_id, portfolio_name, portfolio_description, portfolio_settings, timeout=600)
def createSignalsModel(self, csv_data, model_name, timeout=600)
def deleteBlacklist(self, blacklist_id)
def export_data(self, dataset_id, start=datetime.date(1997, 5, 4), end=datetime.date(2022, 4, 28), timeout=600)
def export_dependent_data(self, dataset_id, start=datetime.date(1997, 5, 4), end=datetime.date(2022, 4, 28), timeout=600)
def export_global_data(self, dataset_id, start=datetime.date(1997, 5, 4), end=datetime.date(2022, 4, 28), timeout=600)
def export_independent_data(self, dataset_id, start=datetime.date(1997, 5, 4), end=datetime.date(2022, 4, 28), timeout=600)
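
As with createBlacklist, the start/end defaults above look like documentation-build-date snapshots; pass explicit dates. A minimal sketch (whether the export methods return a DataFrame or CSV text is not stated here):

    import datetime

    exported = client.export_independent_data(
        dataset_id,
        start=datetime.date(2020, 1, 1),
        end=datetime.date(2021, 1, 1),
    )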
def getAllocationsByDates(self, portfolio_id, dates=None)
def getAllocationsForDate(self, portfolio_id, date, rollback_to_last_available_date)
def getAllocationsForDateV2(self, portfolio_id, date, rollback_to_last_available_date)
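
A sketch of fetching allocations for a single date. rollback_to_last_available_date is positional; the reading that True falls back to the most recent prior date with data is my assumption:

    import datetime

    allocs = client.getAllocationsForDateV2(
        portfolio_id,
        datetime.date(2022, 1, 3),
        True,  # assumed: roll back to the last date with available allocations
    )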
def getBlacklist(self, blacklist_id)
def getBlacklists(self, model_id=None, company_id=None, last_N=None)
def getDatasetDates(self, dataset_id)
def getDenseSignals(self, model_id, portfolio_id, file_name=None, location='./')
def getFeatureImportance(self, model_id, date, N=None)
def getGbiIdFromIsinCountryCurrencyDate(self, isin_country_currency_dates, timeout=600)
def getHistoricalTradeDates(self, portfolio_id, start_date=None, end_date=None)
def getPortfolioSettings(self, portfolio_id, timeout=600)
def getPortfolioStatus(self, model_id, portfolio_id, job_date)
def getRanking2DateAnalysisFile(self, model_id, portfolio_id, date, file_name=None, location='./')
def getRanking2DateExplainFile(self, model_id, portfolio_id, date, file_name=None, location='./')
def getRankingAnalysis(self, model_id, date)
def getRankingExplain(self, model_id, date)
def getRankingsForAllDates(self, portfolio_id, dates=None)
def getRankingsForDate(self, portfolio_id, date, rollback_to_last_available_date)
def getSignalsForAllDates(self, portfolio_id, dates=None)
def getSignalsForDate(self, portfolio_id, date, rollback_to_last_available_date)
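
The signals getters mirror the allocations getters above; a minimal sketch, assuming dates=None means all available dates:

    import datetime

    signals = client.getSignalsForDate(portfolio_id, datetime.date(2022, 1, 3), True)
    all_signals = client.getSignalsForAllDates(portfolio_id)  # dates=None: all dates (assumed)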
def getSignalsFromUploadedModel(self, model_id, date=None)
def getTearSheet(self, model_id, portfolio_id, start_date=None, end_date=None, block=False)
def getTradeExplain(self, portfolio_id, date=None)
def getUniverse(self, modelId, date=None)
def get_csv_buffer(self)
def get_dataset_schema(self, dataset_id)
def get_inference(self, model_id, inference_date=datetime.date(2022, 4, 28), block=False, timeout_minutes=30)
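
The inference_date default above is again a build-date snapshot. A sketch of a blocking call; block is False by default, so pass block=True to wait up to timeout_minutes:

    import datetime

    result = client.get_inference(
        model_id,
        inference_date=datetime.date(2023, 6, 1),  # example date
        block=True,           # wait for the run instead of returning immediately
        timeout_minutes=30,   # documented default
    )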
def query_dataset(self, dataset_id)
def sendModelRecalc(self, model_id)
def setup_chunk_and_upload_data(self, dataset_id, csv_data, data_type, timeout=600, block=True, no_exception_on_chunk_error=False)
def start_chunked_upload(self, dataset_id)
def updateBlacklist(self, blacklist_id, long_short=None, start_date=None, end_date=None)
def updateUniverse(self, modelId, universe_df, date=datetime.date(2022, 4, 29))
def upload_dataset_chunk(self, chunk_descriptor, dataset_id, chunk_id, csv_data, timeout=600, no_exception_on_chunk_error=False)
def validate_dataframe(self, df)
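
validate_dataframe reads as a pre-upload sanity check; a sketch assuming it returns a truthy result for a well-formed pandas DataFrame (the signature alone does not say):

    import pandas as pd

    df = pd.read_csv("my_factors.csv")  # hypothetical input file
    if client.validate_dataframe(df):   # assumed: truthy when df is well-formed
        client.add_independent_dataset(df)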