

Python LoggerManager.error Method Code Examples

This article collects typical usage examples of the Python method pythalesians.util.loggermanager.LoggerManager.error. If you are wondering what LoggerManager.error does, how to call it, or how it is used in practice, the curated code examples below should help. You can also explore further usage examples of pythalesians.util.loggermanager.LoggerManager, the class this method belongs to.


Five code examples of the LoggerManager.error method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
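Before the examples, here is a minimal usage sketch of the method (assuming pythalesians is installed and importable; the simulated exception is purely illustrative and mirrors the call pattern used in the examples that follow):

# Minimal usage sketch of LoggerManager.error (assumes pythalesians is importable);
# the simulated failure below is purely illustrative.
from pythalesians.util.loggermanager import LoggerManager

logger = LoggerManager().getLogger(__name__)

try:
    raise ValueError("simulated failure")
except ValueError:
    # forwards the message to the underlying Python logging handlers
    logger.error("Could not complete the request")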

Example 1: LoaderQuandl

# Required import: from pythalesians.util.loggermanager import LoggerManager [as alias]
# Or: from pythalesians.util.loggermanager.LoggerManager import error [as alias]
class LoaderQuandl(LoaderTemplate):

    def __init__(self):
        super(LoaderQuandl, self).__init__()
        self.logger = LoggerManager().getLogger(__name__)

    # implement method in abstract superclass
    def load_ticker(self, time_series_request):
        time_series_request_vendor = self.construct_vendor_time_series_request(time_series_request)

        self.logger.info("Request Quandl data")

        data_frame = self.download_daily(time_series_request_vendor)

        if data_frame is None or len(data_frame.index) == 0: return None

        # convert from vendor to Thalesians tickers/fields
        if data_frame is not None:
            returned_tickers = data_frame.columns

            # tidy up tickers into a format that is more easily translatable
            returned_tickers = [x.replace(' - Value', '') for x in returned_tickers]
            returned_tickers = [x.replace(' - VALUE', '') for x in returned_tickers]
            returned_tickers = [x.replace('.', '/') for x in returned_tickers]

            fields = self.translate_from_vendor_field(['close' for x in returned_tickers], time_series_request)
            tickers = self.translate_from_vendor_ticker(returned_tickers, time_series_request)

            ticker_combined = []

            for i in range(0, len(fields)):
                ticker_combined.append(tickers[i] + "." + fields[i])

            data_frame.columns = ticker_combined
            data_frame.index.name = 'Date'

        self.logger.info("Completed request from Quandl.")

        return data_frame

    def download_daily(self, time_series_request):
        trials = 0

        data_frame = None

        while trials < 5:
            try:
                data_frame = Quandl.get(time_series_request.tickers, authtoken=Constants().quandl_api_key,
                                        trim_start=time_series_request.start_date,
                                        trim_end=time_series_request.finish_date)

                break
            except Exception:
                trials = trials + 1
                self.logger.info("Attempt " + str(trials) + ": retrying download from Quandl")

        if trials == 5:
            self.logger.error("Couldn't download from Quandl after several attempts!")

        return data_frame
Developer ID: BryanFletcher, Project: pythalesians, Lines of code: 62, Source file: loaderquandl.py
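The download_daily method above pairs logger.info with LoggerManager.error: an informational message is emitted on each retry, and error is reserved for the case where all five attempts fail. A standalone sketch of that retry-then-log pattern is shown below (fetch_data and MAX_TRIALS are hypothetical names introduced for illustration, not part of pythalesians):

from pythalesians.util.loggermanager import LoggerManager

MAX_TRIALS = 5  # hypothetical constant mirroring the hard-coded limit above

def download_with_retries(fetch_data):
    # fetch_data is a hypothetical zero-argument callable that performs the download
    logger = LoggerManager().getLogger(__name__)
    data_frame = None

    for trial in range(1, MAX_TRIALS + 1):
        try:
            data_frame = fetch_data()
            break
        except Exception:
            logger.info("Attempt " + str(trial) + ": retrying download")

    if data_frame is None:
        logger.error("Couldn't download after " + str(MAX_TRIALS) + " attempts!")

    return data_frame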

Example 2: BBGLowLevelRef

# Required import: from pythalesians.util.loggermanager import LoggerManager [as alias]
# Or: from pythalesians.util.loggermanager.LoggerManager import error [as alias]
class BBGLowLevelRef(BBGLowLevelTemplate):

    def __init__(self):
        super(BBGLowLevelRef, self).__init__()

        self.logger = LoggerManager().getLogger(__name__)
        self._options = []

    # populate options for a Bloomberg reference data request
    def fill_options(self, time_series_request):
        self._options = OptionsBBG()

        self._options.security = time_series_request.tickers
        self._options.startDateTime = time_series_request.start_date
        self._options.endDateTime = time_series_request.finish_date
        self._options.fields = time_series_request.fields

        return self._options

    def process_message(self, msg):
        data = collections.defaultdict(dict)

        # process received events
        securityDataArray = msg.getElement('securityData')

        index = 0

        for securityData in list(securityDataArray.values()):
            ticker = securityData.getElementAsString("security")
            fieldData = securityData.getElement("fieldData")

            for field in fieldData.elements():
                if not field.isValid():
                    field_name = "%s" % field.name()

                    self.logger.error(field_name + " is NULL")
                elif field.isArray():
                    # iterate over complex data returns.
                    field_name = "%s" % field.name()

                    for i, row in enumerate(field.values()):
                        data[(field_name, ticker)][index] = re.findall(r'"(.*?)"', "%s" % row)[0]

                        index = index + 1
                # else:
                    # vals.append(re.findall(r'"(.*?)"', "%s" % row)[0])
                    # print("%s = %s" % (field.name(), field.getValueAsString()))

            fieldExceptionArray = securityData.getElement("fieldExceptions")

            for fieldException in list(fieldExceptionArray.values()):
                errorInfo = fieldException.getElement("errorInfo")
                print(errorInfo.getElementAsString("category"), ":", \
                    fieldException.getElementAsString("fieldId"))

        data_frame = pandas.DataFrame(data)

        # if obsolete ticker could return no values
        if (not(data_frame.empty)):
            data_frame.columns = pandas.MultiIndex.from_tuples(data, names=['field', 'ticker'])
            self.logger.info("Reading: " + ticker + ' ' + str(data_frame.index[0]) + ' - ' + str(data_frame.index[-1]))
        else:
            return None

        return data_frame

    def combine_slices(self, data_frame, data_frame_slice):
        if (data_frame_slice.columns.get_level_values(1).values[0]
            not in data_frame.columns.get_level_values(1).values):

            return data_frame.join(data_frame_slice, how="outer")

        return data_frame

    # create request for data
    def send_bar_request(self, session, eventQueue):
        refDataService = session.getService("//blp/refdata")
        request = refDataService.createRequest('ReferenceDataRequest')

        self.add_override(request, 'TIME_ZONE_OVERRIDE', 23)    # force GMT time
        self.add_override(request, 'START_DT', self._options.startDateTime.strftime('%Y%m%d'))
        self.add_override(request, 'END_DT', self._options.endDateTime.strftime('%Y%m%d'))

        # only one security/eventType per request
        for field in self._options.fields:
            request.getElement("fields").appendValue(field)

        for security in self._options.security:
            request.getElement("securities").appendValue(security)

        self.logger.info("Sending Bloomberg Ref Request:" + str(request))
        session.sendRequest(request)
Developer ID: humdings, Project: pythalesians, Lines of code: 94, Source file: loaderbbgopen.py

Example 3: __init__

# Required import: from pythalesians.util.loggermanager import LoggerManager [as alias]
# Or: from pythalesians.util.loggermanager.LoggerManager import error [as alias]

#......... part of the code omitted here .........
            time_series_request.tickers = self.config.get_tickers_list_for_category(
                time_series_request.category, time_series_request.source, time_series_request.freq, time_series_request.cut)

        # intraday or tick: only one ticker per cache file
        if (time_series_request.freq in ['intraday', 'tick']):
            data_frame_agg = self.download_intraday_tick(time_series_request, loader)

        # daily: multiple tickers per cache file - assume we make one API call to vendor library
        else: data_frame_agg = self.download_daily(time_series_request, loader)

        if('internet_load' in time_series_request.cache_algo):
            self.logger.debug("Internet loading.. ")

            # signal to loader template to exit session
            if loader is not None and kill_session == True: loader.kill_session()

        if(time_series_request.cache_algo == 'cache_algo'):
            self.logger.debug("Only caching data in memory, do not return any time series."); return

        tsf = TimeSeriesFilter()

        # only return time series if specified in the algo
        if 'return' in time_series_request.cache_algo:
            # special case for events/events-dt which is not indexed like other tables
            if hasattr(time_series_request, 'category'):
                if 'events' in time_series_request.category:
                    return data_frame_agg

            try:
                return tsf.filter_time_series(time_series_request, data_frame_agg)
            except:
                import traceback

                self.logger.error(traceback.format_exc())

                return None

    def get_time_series_cached(self, time_series_request):
        """
        get_time_series_cached - Loads time series from cache (if it exists)

        Parameters
        ----------
        time_series_request : TimeSeriesRequest
            contains various properties describing the time series to be fetched, including ticker, start & finish date etc.

        Returns
        -------
        pandas.DataFrame
        """

        if (time_series_request.freq == "intraday"):
            ticker = time_series_request.tickers
        else:
            ticker = None

        fname = self.create_time_series_hash_key(time_series_request, ticker)

        if (fname in self._time_series_cache):
            data_frame = self._time_series_cache[fname]

            tsf = TimeSeriesFilter()

            return tsf.filter_time_series(time_series_request, data_frame)

        return None
Developer ID: humdings, Project: pythalesians, Lines of code: 70, Source file: lighttimeseriesfactory.py

Example 4: __init__

# Required import: from pythalesians.util.loggermanager import LoggerManager [as alias]
# Or: from pythalesians.util.loggermanager.LoggerManager import error [as alias]

#......... part of the code omitted here .........
            time_series_request.tickers = self.config.get_tickers_list_for_category(
                time_series_request.category, time_series_request.source, time_series_request.freq, time_series_request.cut)

        # intraday or tick: only one ticker per cache file
        if (time_series_request.freq in ['intraday', 'tick']):
            data_frame_agg = self.download_intraday_tick(time_series_request, loader)

        # daily: multiple tickers per cache file - assume we make one API call to vendor library
        else: data_frame_agg = self.download_daily(time_series_request, loader)

        if('internet_load' in time_series_request.cache_algo):
            self.logger.debug("Internet loading.. ")

            # signal to loader template to exit session
            # if loader is not None and kill_session == True: loader.kill_session()

        if(time_series_request.cache_algo == 'cache_algo'):
            self.logger.debug("Only caching data in memory, do not return any time series."); return

        tsf = TimeSeriesFilter()

        # only return time series if specified in the algo
        if 'return' in time_series_request.cache_algo:
            # special case for events/events-dt which is not indexed like other tables
            if hasattr(time_series_request, 'category'):
                if 'events' in time_series_request.category:
                    return data_frame_agg

            try:
                return tsf.filter_time_series(time_series_request, data_frame_agg)
            except:
                import traceback

                self.logger.error(traceback.format_exc())

                return None

    def get_time_series_cached(self, time_series_request):
        """
        get_time_series_cached - Loads time series from cache (if it exists)

        Parameters
        ----------
        time_series_request : TimeSeriesRequest
            contains various properties describing the time series to be fetched, including ticker, start & finish date etc.

        Returns
        -------
        pandas.DataFrame
        """

        if (time_series_request.freq == "intraday"):
            ticker = time_series_request.tickers
        else:
            ticker = None

        fname = self.create_time_series_hash_key(time_series_request, ticker)

        if (fname in self._time_series_cache):
            data_frame = self._time_series_cache[fname]

            tsf = TimeSeriesFilter()

            return tsf.filter_time_series(time_series_request, data_frame)

        return None
Developer ID: droter, Project: pythalesians, Lines of code: 70, Source file: lighttimeseriesfactory.py

Example 5: __init__

# Required import: from pythalesians.util.loggermanager import LoggerManager [as alias]
# Or: from pythalesians.util.loggermanager.LoggerManager import error [as alias]
class HistEconDataFactory:
    def __init__(self):
        self.logger = LoggerManager().getLogger(__name__)

        self._all_econ_tickers = pandas.read_csv(Constants().all_econ_tickers)
        self._econ_country_codes = pandas.read_csv(Constants().econ_country_codes)
        self._econ_country_groups = pandas.read_csv(Constants().econ_country_groups)

        self.time_series_factory = LightTimeSeriesFactory()

        # if Constants().default_time_series_factory == 'lighttimeseriesfactory':
        #     self.time_series_factory = LightTimeSeriesFactory()
        # else:
        #     self.time_series_factory = CachedTimeSeriesFactory()
        # return

    def get_economic_data_history(
        self, start_date, finish_date, country_group, data_type, source="fred", cache_algo="internet_load_return"
    ):

        # vendor_country_codes = self.fred_country_codes[country_group]
        # vendor_pretty_country = self.fred_nice_country_codes[country_group]

        if isinstance(country_group, list):
            pretty_country_names = country_group
        else:
            # get all the country names in the country_group
            pretty_country_names = list(
                self._econ_country_groups[self._econ_country_groups["Country Group"] == country_group]["Country"]
            )

        # construct the pretty tickers
        pretty_tickers = [x + "-" + data_type for x in pretty_country_names]

        # get vendor tickers
        vendor_tickers = []

        for pretty_ticker in pretty_tickers:
            vendor_ticker = list(
                self._all_econ_tickers[self._all_econ_tickers["Full Code"] == pretty_ticker][source].values
            )

            if vendor_ticker == []:
                vendor_ticker = None
                self.logger.error("Could not find match for " + pretty_ticker)
            else:
                vendor_ticker = vendor_ticker[0]

            vendor_tickers.append(vendor_ticker)

        vendor_fields = ["close"]

        if source == "bloomberg":
            vendor_fields = ["PX_LAST"]

        time_series_request = TimeSeriesRequest(
            start_date=start_date,  # start date
            finish_date=finish_date,  # finish date
            category="economic",
            freq="daily",  # daily data
            data_source=source,  # data source (eg. fred or bloomberg)
            cut="LOC",
            tickers=pretty_tickers,
            fields=["close"],  # which fields to download
            vendor_tickers=vendor_tickers,
            vendor_fields=vendor_fields,  # which vendor fields to download
            cache_algo=cache_algo,
        )  # how to return data

        return self.time_series_factory.harvest_time_series(time_series_request)

    def grasp_coded_entry(self, df, index):
        df = df.ix[index:].stack()  # note: .ix is legacy pandas indexing, removed in newer pandas versions
        df = df.reset_index()
        df.columns = ["Date", "Name", "Val"]

        countries = df["Name"]

        countries = [x.split("-", 1)[0] for x in countries]

        df["Code"] = sum(
            [list(self._econ_country_codes[self._econ_country_codes["Country"] == x]["Code"]) for x in countries], []
        )

        return df
Developer ID: swaraj007, Project: pythalesians, Lines of code: 87, Source file: histecondatafactory.py


Note: The pythalesians.util.loggermanager.LoggerManager.error examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please follow the corresponding project's license when using or redistributing the code; do not reproduce without permission.