本文整理汇总了Python中pythalesians.util.loggermanager.LoggerManager.debug方法的典型用法代码示例。如果您正苦于以下问题:Python LoggerManager.debug方法的具体用法?Python LoggerManager.debug怎么用?Python LoggerManager.debug使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pythalesians.util.loggermanager.LoggerManager
的用法示例。
在下文中一共展示了LoggerManager.debug方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from pythalesians.util.loggermanager import LoggerManager [as 别名]
# 或者: from pythalesians.util.loggermanager.LoggerManager import debug [as 别名]
#.........这里部分代码省略.........
time_series_request : TimeSeriesRequest
contains various properties describing time series to fetched, including ticker, start & finish date etc.
Returns
-------
pandas.DataFrame
"""
tickers = time_series_request.tickers
loader = self.get_loader(time_series_request.data_source)
# check if tickers have been specified (if not load all of them for a category)
# also handle single tickers/list tickers
create_tickers = False
if tickers is None : create_tickers = True
elif isinstance(tickers, str):
if tickers == '': create_tickers = True
elif isinstance(tickers, list):
if tickers == []: create_tickers = True
if create_tickers:
time_series_request.tickers = self.config.get_tickers_list_for_category(
time_series_request.category, time_series_request.source, time_series_request.freq, time_series_request.cut)
# intraday or tick: only one ticker per cache file
if (time_series_request.freq in ['intraday', 'tick']):
data_frame_agg = self.download_intraday_tick(time_series_request, loader)
# daily: multiple tickers per cache file - assume we make one API call to vendor library
else: data_frame_agg = self.download_daily(time_series_request, loader)
if('internet_load' in time_series_request.cache_algo):
self.logger.debug("Internet loading.. ")
# signal to loader template to exit session
if loader is not None and kill_session == True: loader.kill_session()
if(time_series_request.cache_algo == 'cache_algo'):
self.logger.debug("Only caching data in memory, do not return any time series."); return
tsf = TimeSeriesFilter()
# only return time series if specified in the algo
if 'return' in time_series_request.cache_algo:
# special case for events/events-dt which is not indexed like other tables
if hasattr(time_series_request, 'category'):
if 'events' in time_series_request.category:
return data_frame_agg
try:
return tsf.filter_time_series(time_series_request, data_frame_agg)
except:
import traceback
self.logger.error(traceback.format_exc())
return None
def get_time_series_cached(self, time_series_request):
"""
get_time_series_cached - Loads time series from cache (if it exists)
Parameters
----------
time_series_request : TimeSeriesRequest
示例2: LoaderBBG
# 需要导入模块: from pythalesians.util.loggermanager import LoggerManager [as 别名]
# 或者: from pythalesians.util.loggermanager.LoggerManager import debug [as 别名]
#.........这里部分代码省略.........
data_frame = pandas.concat([events_data_frame, datetime_data_frame], axis = 1)
temp = data_frame[col]
del data_frame[col]
data_frame.index = temp
else:
data_frame = datetime_data_frame
# for all other daily/monthly/quarter data, we can use HistoricalDataRequest to Bloomberg
else:
data_frame = self.get_daily_data(time_series_request, time_series_request_vendor)
# assume one ticker only
# for intraday data we use IntradayDataRequest to Bloomberg
if (time_series_request.freq in ['intraday', 'minute', 'hourly']):
time_series_request_vendor.tickers = time_series_request_vendor.tickers[0]
data_frame = self.download_intraday(time_series_request_vendor)
cols = data_frame.columns.values
data_frame.tz_localize('UTC')
cols = time_series_request.tickers[0] + "." + cols
data_frame.columns = cols
self.logger.info("Completed request from Bloomberg.")
return data_frame
def get_daily_data(self, time_series_request, time_series_request_vendor):
    """
    get_daily_data - Downloads daily data from the vendor and renames the
    columns from vendor tickers/fields to Thalesians tickers/fields.

    Parameters
    ----------
    time_series_request : TimeSeriesRequest
        request expressed in Thalesians tickers/fields
    time_series_request_vendor : TimeSeriesRequest
        the same request translated into vendor tickers/fields

    Returns
    -------
    pandas.DataFrame or None
        columns renamed to "ticker.field"; None if the download failed
    """
    data_frame = self.download_daily(time_series_request_vendor)

    # convert from vendor to Thalesians tickers/fields
    # (single check: the original duplicated "if data_frame is not None" twice in a row)
    if data_frame is not None:
        # TODO if empty try downloading again a year later
        returned_fields = data_frame.columns.get_level_values(0)
        returned_tickers = data_frame.columns.get_level_values(1)

        fields = self.translate_from_vendor_field(returned_fields, time_series_request)
        tickers = self.translate_from_vendor_ticker(returned_tickers, time_series_request)

        # combine as "ticker.field"; tickers and fields are parallel lists
        data_frame.columns = [ticker + "." + field for ticker, field in zip(tickers, fields)]
        data_frame.index.name = 'Date'

    return data_frame
def get_reference_data(self, time_series_request_vendor, time_series_request):
end = datetime.datetime.today()
end = end.replace(year = end.year + 1)
time_series_request_vendor.finish_date = end
self.logger.debug("Requesting ref for " + time_series_request_vendor.tickers[0] + " etc.")
data_frame = self.download_ref(time_series_request_vendor)
self.logger.debug("Waiting for ref...")
# convert from vendor to Thalesians tickers/fields
if data_frame is not None:
returned_fields = data_frame.columns.get_level_values(0)
returned_tickers = data_frame.columns.get_level_values(1)
if data_frame is not None:
# TODO if empty try downloading again a year later
fields = self.translate_from_vendor_field(returned_fields, time_series_request)
tickers = self.translate_from_vendor_ticker(returned_tickers, time_series_request)
ticker_combined = []
for i in range(0, len(fields)):
ticker_combined.append(tickers[i] + "." + fields[i])
data_frame.columns = ticker_combined
data_frame = data_frame.convert_objects(convert_dates = 'coerce', convert_numeric= 'coerce')
return data_frame
# Contract that concrete loader subclasses must fulfil: each of the methods
# below is declared abstract on this superclass and implemented per vendor.

@abc.abstractmethod
def kill_session(self):
    """Terminate any open session with the data vendor."""
    return

@abc.abstractmethod
def download_intraday(self, time_series_request):
    """Download intraday data for the supplied request."""
    return

@abc.abstractmethod
def download_daily(self, time_series_request):
    """Download daily data for the supplied request."""
    return

@abc.abstractmethod
def download_ref(self, time_series_request):
    """Download reference data for the supplied request."""
    return
示例3: IntradayBarRequest
# 需要导入模块: from pythalesians.util.loggermanager import LoggerManager [as 别名]
# 或者: from pythalesians.util.loggermanager.LoggerManager import debug [as 别名]
class IntradayBarRequest(Request):
    # Wraps a Bloomberg (COM API) IntradayBarRequest: builds the request
    # object and accumulates bar data from response events into a
    # pandas DataFrame stored on self.response.

    def __init__(self, symbol, interval, start=None, end=None, event='TRADE'):
        """
        Intraday bar request for bbg

        Parameters
        ----------
        symbol : string
            a single Bloomberg security identifier
        interval : number of minutes per bar
        start : start date (defaults to 30 days before today)
        end : end date (if None then use today)
        event : (TRADE,BID,ASK,BEST_BID,BEST_ASK)
        """
        Request.__init__(self)
        self.logger = LoggerManager().getLogger(__name__)
        assert event in ('TRADE', 'BID', 'ASK', 'BEST_BID', 'BEST_ASK')
        assert isinstance(symbol, str)
        # NOTE(review): default start is based on local time (datetime.today())
        # while default end uses UTC (datetime.utcnow()) -- looks inconsistent;
        # confirm which baseline is intended.
        if start is None:
            start = datetime.today() - timedelta(30)
        if end is None:
            end = datetime.utcnow()
        self.symbol = symbol
        self.interval = interval
        self.start = to_datetime(start)
        self.end = to_datetime(end)
        self.event = event
        # response related: bar values are accumulated column-wise here and
        # converted into a DataFrame on the final event (see on_event)
        self.response = defaultdict(list)

    def get_bbg_service_name(self):
        # reference-data service handles intraday bar requests as well
        return '//blp/refdata'

    def get_bbg_request(self, svc, session):
        # create the bbg request object
        start, end = self.start, self.end
        request = svc.CreateRequest('IntradayBarRequest')
        request.Set('security', self.symbol)
        request.Set('interval', self.interval)
        request.Set('eventType', self.event)
        # COM datetimes are built to minute precision only
        request.Set('startDateTime', session.CreateDatetime(start.year, start.month, start.day, start.hour, start.minute))
        request.Set('endDateTime', session.CreateDatetime(end.year, end.month, end.day, end.hour, end.minute))
        self.logger.info("Fetching intraday data for " + str(self.symbol) + " from "
                         + start.strftime('%d/%m/%Y') + " to " + end.strftime('%d/%m/%Y'))
        return request

    def on_event(self, evt, is_final):
        """
        on_event - This is invoked from in response to COM PumpWaitingMessages - different thread
        """
        response = self.response
        self.logger.debug("Receiving data from Bloomberg...")
        for msg in XmlHelper.message_iter(evt):
            bars = msg.GetElement('barData').GetElement('barTickData')
            self.logger.debug("Read message...")
            for i in range(bars.NumValues):
                bar = bars.GetValue(i)
                # bar elements are read by position: 0=time, 1=open, 2=high,
                # 3=low, 4=close, 5=volume, 6=events
                ts = bar.GetElement(0).Value
                dt = datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute)
                response['time'].append(dt)
                response['open'].append(bar.GetElement(1).Value)
                response['high'].append(bar.GetElement(2).Value)
                response['low'].append(bar.GetElement(3).Value)
                response['close'].append(bar.GetElement(4).Value)
                response['volume'].append(bar.GetElement(5).Value)
                response['events'].append(bar.GetElement(6).Value)
                # progress logging every 20000 bars (also logs the first one)
                if (i % 20000 == 0):
                    dt_str = dt.strftime('%d/%m/%Y')
                    self.logger.debug("Processing " + dt_str)
            self.logger.debug("Finished processing for ticker.")
        if is_final:
            # final event: replace the accumulator dict with a float32
            # DataFrame indexed by bar timestamp
            idx = response.pop('time')
            self.response = DataFrame(response, columns=['open', 'high', 'low', 'close', 'volume', 'events'],
                                      index=idx)
            self.response.index.name = 'Date'
            self.response = self.response.astype('float32')
示例4: __init__
# 需要导入模块: from pythalesians.util.loggermanager import LoggerManager [as 别名]
# 或者: from pythalesians.util.loggermanager.LoggerManager import debug [as 别名]
#.........这里部分代码省略.........
contains various properties describing time series to fetched, including ticker, start & finish date etc.
Returns
-------
pandas.DataFrame
"""
tickers = time_series_request.tickers
loader = self.get_loader(time_series_request.data_source)
# check if tickers have been specified (if not load all of them for a category)
# also handle single tickers/list tickers
create_tickers = False
if tickers is None :
create_tickers = True
elif isinstance(tickers, str):
if tickers == '': create_tickers = True
elif isinstance(tickers, list):
if tickers == []: create_tickers = True
if create_tickers:
time_series_request.tickers = self.config.get_tickers_list_for_category(
time_series_request.category, time_series_request.source, time_series_request.freq, time_series_request.cut)
# intraday or tick: only one ticker per cache file
if (time_series_request.freq in ['intraday', 'tick']):
data_frame_agg = self.download_intraday_tick(time_series_request, loader)
# daily: multiple tickers per cache file - assume we make one API call to vendor library
else: data_frame_agg = self.download_daily(time_series_request, loader)
if('internet_load' in time_series_request.cache_algo):
self.logger.debug("Internet loading.. ")
# signal to loader template to exit session
# if loader is not None and kill_session == True: loader.kill_session()
if(time_series_request.cache_algo == 'cache_algo'):
self.logger.debug("Only caching data in memory, do not return any time series."); return
tsf = TimeSeriesFilter()
# only return time series if specified in the algo
if 'return' in time_series_request.cache_algo:
# special case for events/events-dt which is not indexed like other tables
if hasattr(time_series_request, 'category'):
if 'events' in time_series_request.category:
return data_frame_agg
try:
return tsf.filter_time_series(time_series_request, data_frame_agg)
except:
import traceback
self.logger.error(traceback.format_exc())
return None
def get_time_series_cached(self, time_series_request):
"""
get_time_series_cached - Loads time series from cache (if it exists)
Parameters
----------
time_series_request : TimeSeriesRequest
示例5: ReferenceDataRequest
# 需要导入模块: from pythalesians.util.loggermanager import LoggerManager [as 别名]
# 或者: from pythalesians.util.loggermanager.LoggerManager import debug [as 别名]
class ReferenceDataRequest(Request):
    # Wraps a Bloomberg (COM API) ReferenceDataRequest: builds the request
    # for a set of securities/fields and collects the results either as a
    # DataFrame or as a plain dict keyed by security.

    def __init__(self, symbols, fields, overrides=None, response_type='frame', ignore_security_error=0, ignore_field_error=0):
        """
        response_type: (frame, map) how to return the results
        """
        assert response_type in ('frame', 'map')
        Request.__init__(self, ignore_security_error=ignore_security_error, ignore_field_error=ignore_field_error)
        # normalise single strings into one-element lists
        self.symbols = isinstance(symbols, str) and [symbols] or symbols
        self.fields = isinstance(fields, str) and [fields] or fields
        self.overrides = overrides
        # response related: 'map' keeps {security: [values]}, 'frame'
        # accumulates columns that are turned into a DataFrame in on_event
        self.response = {} if response_type == 'map' else defaultdict(list)
        self.response_type = response_type
        self.logger = LoggerManager().getLogger(__name__)

    def get_bbg_service_name(self):
        return '//blp/refdata'

    def quick_override(self, request, fieldId, val):
        # append a single fieldId/value override to the request
        # (AppendElment is the COM wrapper's actual method spelling)
        o = request.GetElement('overrides').AppendElment()
        o.SetElement('fieldId', fieldId)
        o.SetElement('value', val)

    def get_bbg_request(self, svc, session):
        # create the bbg request object
        request = svc.CreateRequest('ReferenceDataRequest')
        [request.GetElement('securities').AppendValue(sec) for sec in self.symbols]
        [request.GetElement('fields').AppendValue(fld) for fld in self.fields]
        #self.quick_override(request, 'START_DT', '19990101')
        #self.quick_override(request, 'END_DT', '20200101')
        self.quick_override(request, 'TIME_ZONE_OVERRIDE', 23) # force GMT time
        Request.apply_overrides(request, self.overrides)
        return request

    def on_security_node(self, node):
        # extract one security's field values from the response node and
        # store them according to the configured response_type
        sid = XmlHelper.get_child_value(node, 'security')
        farr = node.GetElement('fieldData')
        fdata = XmlHelper.get_child_values(farr, self.fields)
        self.logger.debug("Returning reference data...")
        assert len(fdata) == len(self.fields), 'field length must match data length'
        if self.response_type == 'map':
            self.response[sid] = fdata
        else:
            self.response['security'].append(sid)
            [self.response[f].append(d) for f, d in zip(self.fields, fdata)]
        # add any field errors to the running list (the truthiness check
        # avoids extending with an empty/None result)
        ferrors = XmlHelper.get_field_errors(node)
        ferrors and self.field_errors.extend(ferrors)

    def on_event(self, evt, is_final):
        """ this is invoked from in response to COM PumpWaitingMessages - different thread """
        for msg in XmlHelper.message_iter(evt):
            for node, error in XmlHelper.security_iter(msg.GetElement('securityData')):
                if error:
                    self.security_errors.append(error)
                else:
                    self.on_security_node(node)
        if is_final and self.response_type == 'frame':
            # final event: replace the accumulator dict with a DataFrame
            # indexed by security identifier
            index = self.response.pop('security')
            frame = DataFrame(self.response, columns=self.fields, index=index)
            frame.index.name = 'security'
            self.response = frame