本文整理汇总了Python中zipline.utils.test_utils.tmp_asset_finder函数的典型用法代码示例。如果您正苦于以下问题:Python tmp_asset_finder函数的具体用法?Python tmp_asset_finder怎么用?Python tmp_asset_finder使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了tmp_asset_finder函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_id_ffill_out_of_window_macro_dataset
def test_id_ffill_out_of_window_macro_dataset(self):
    """Macro values dated before the query window forward-fill into it.

    The input frame's only non-null ``value`` (0) and ``other`` (1) are
    stamped ten days before ``self.dates``, so every (date, asset) row of
    the pipeline output should carry the forward-filled pair (0, 1).
    """
    shifted_dates = self.dates - timedelta(days=10)
    baseline = pd.DataFrame({
        "value": (0, np.nan, np.nan),
        "other": (np.nan, 1, np.nan),
        "asof_date": shifted_dates,
        "timestamp": shifted_dates,
    })
    fields = OrderedDict(self.macro_dshape.measure.fields)
    fields["other"] = fields["value"]
    with tmp_asset_finder() as finder:
        assets = finder.retrieve_all(self.sids)
        expected = pd.DataFrame(
            # Nine identical rows: one per (date, asset) pair.
            np.tile([0, 1], (9, 1)),
            columns=["value", "other"],
            index=pd.MultiIndex.from_product((self.dates, assets)),
        ).sort_index(axis=1)
        self._test_id(
            baseline,
            var * Record(fields),
            expected,
            finder,
            ("value", "other"),
        )
示例2: _test_id
def _test_id(self, df, dshape, expected, finder, add):
    """Run a pipeline over ``df`` exposed through blaze and check results.

    Parameters
    ----------
    df : pd.DataFrame
        The raw data to expose through blaze.
    dshape : datashape.DataShape
        The datashape describing ``df``.
    expected : pd.DataFrame
        The frame the pipeline is expected to produce.
    finder : AssetFinder
        The asset finder to run the pipeline against.
    add : iterable[str]
        Names of ``ds`` columns whose ``.latest`` is added to the pipeline.
    """
    expr = bz.Data(df, name='expr', dshape=dshape)
    loader = BlazeLoader()
    ds = from_blaze(
        expr,
        loader=loader,
        no_deltas_rule=no_deltas_rules.ignore,
        missing_values=self.missing_values,
    )
    p = Pipeline()
    for a in add:
        p.add(getattr(ds, a).latest, a)
    dates = self.dates
    # BUG FIX: the ``finder`` argument was previously shadowed by a fresh
    # ``with tmp_asset_finder() as finder:`` here, silently ignoring the
    # finder callers constructed (e.g. with specific equity info).  Use the
    # finder that was passed in.
    result = SimplePipelineEngine(
        loader,
        dates,
        finder,
    ).run_pipeline(p, dates[0], dates[-1])
    assert_frame_equal(
        result,
        _utc_localize_index_level_0(expected),
        check_dtype=False,
    )
示例3: test_id_macro_dataset_multiple_columns
def test_id_macro_dataset_multiple_columns(self):
    """A two-column macro dataset broadcasts both columns across assets.

    ``other`` is derived as ``value + 1``; each daily (value, other) pair
    should be repeated for every asset on that day.
    """
    frame = self.macro_df.copy()
    frame["other"] = frame.value + 1
    fields = OrderedDict(self.macro_dshape.measure.fields)
    fields["other"] = fields["value"]
    asset_info = asset_infos[0][0]
    with tmp_asset_finder(equities=asset_info) as finder:
        assets = finder.retrieve_all(asset_info.index)
        # One (value, other) row per day, repeated once per asset.
        rows = np.repeat(np.array([[0, 1], [1, 2], [2, 3]]), 3, axis=0)
        expected = pd.DataFrame(
            rows,
            index=pd.MultiIndex.from_product((frame.timestamp, assets)),
            columns=("value", "other"),
        ).sort_index(axis=1)
        self._test_id(
            frame,
            var * Record(fields),
            expected,
            finder,
            ("value", "other"),
        )
示例4: test_id
def test_id(self):
    """The identity pipeline echoes per-sid input values unchanged."""
    expr = bz.Data(self.df, name='expr', dshape=self.dshape)
    loader = BlazeLoader()
    ds = from_blaze(
        expr,
        loader=loader,
        no_deltas_rule=no_deltas_rules.ignore,
    )
    pipeline = Pipeline()
    pipeline.add(ds.value.latest, 'value')
    dates = self.dates
    with tmp_asset_finder() as finder:
        engine = SimplePipelineEngine(loader, dates, finder)
        result = engine.run_pipeline(pipeline, dates[0], dates[-1])
        expected = self.df.drop('asof_date', axis=1).set_index(
            ['timestamp', 'sid'],
        )
        # Replace raw sids with Equity objects to match the engine output.
        timestamps, sids = expected.index.levels
        expected.index = pd.MultiIndex.from_product(
            (timestamps, finder.retrieve_all(sids)),
        )
        assert_frame_equal(result, expected, check_dtype=False)
示例5: test_id_macro_dataset
def test_id_macro_dataset(self):
    """
    input (self.macro_df)
            asof_date  timestamp  value
        0  2014-01-01 2014-01-01      0
        3  2014-01-02 2014-01-02      1
        6  2014-01-03 2014-01-03      2
    output (expected):
                                value
        2014-01-01 Equity(65 [A])   0
                   Equity(66 [B])   0
                   Equity(67 [C])   0
        2014-01-02 Equity(65 [A])   1
                   Equity(66 [B])   1
                   Equity(67 [C])   1
        2014-01-03 Equity(65 [A])   2
                   Equity(66 [B])   2
                   Equity(67 [C])   2
    """
    asset_info = asset_infos[0][0]
    nassets = len(asset_info)
    # CONSISTENCY FIX: build the finder from the same ``asset_info`` whose
    # sids are retrieved below, matching the sibling macro-dataset tests;
    # the bare ``tmp_asset_finder()`` ignored ``asset_info`` entirely.
    with tmp_asset_finder(equities=asset_info) as finder:
        expected = pd.DataFrame(
            list(concatv([0] * nassets, [1] * nassets, [2] * nassets)),
            index=pd.MultiIndex.from_product((
                self.macro_df.timestamp,
                finder.retrieve_all(asset_info.index),
            )),
            columns=('value',),
        )
        self._test_id(
            self.macro_df,
            self.macro_dshape,
            expected,
            finder,
            ('value',),
        )
示例6: test_retrieve_specific_type
def test_retrieve_specific_type(self, type_, lookup_name, failure_type):
    """A type-specific retriever returns only assets of its own type and
    raises ``failure_type`` when asked for sids of the other type."""
    equities = make_simple_equity_info(
        range(5),
        start_date=pd.Timestamp("2014-01-01"),
        end_date=pd.Timestamp("2015-01-01"),
    )
    max_equity = equities.index.max()
    futures = make_commodity_future_info(
        first_sid=max_equity + 1,
        root_symbols=["CL"],
        years=[2014],
    )
    equity_sids = [0, 1]
    future_sids = [max_equity + 1, max_equity + 2, max_equity + 3]
    if type_ == Equity:
        success_sids, fail_sids = equity_sids, future_sids
    else:
        success_sids, fail_sids = future_sids, equity_sids
    with tmp_asset_finder(equities=equities, futures=futures) as finder:
        lookup = getattr(finder, lookup_name)
        # The second iteration exercises the finder's caching.
        for _ in range(2):
            results = lookup(success_sids)
            self.assertIsInstance(results, dict)
            self.assertEqual(set(results.keys()), set(success_sids))
            self.assertEqual(
                valmap(int, results),
                dict(zip(success_sids, success_sids)),
            )
            self.assertEqual(
                {type_},
                {type(asset) for asset in itervalues(results)},
            )
            with self.assertRaises(failure_type):
                lookup(fail_sids)
            with self.assertRaises(failure_type):
                # Should fail if **any** of the assets are bad.
                lookup([success_sids[0], fail_sids[0]])
示例7: test_custom_query_time_tz
def test_custom_query_time_tz(self):
    """A ``data_query_time``/``data_query_tz`` on the loader controls when
    rows become visible: rows stamped before the 8:45 EST cutoff surface on
    the same session, rows at/after it surface on the next session (rows 3-5
    are stamped at 13:45 and so shift forward a day in ``expected``).
    """
    df = self.df.copy()
    df["timestamp"] = (
        (pd.DatetimeIndex(df["timestamp"], tz="EST") + timedelta(hours=8, minutes=44))
        .tz_convert("utc")
        .tz_localize(None)
    )
    # FIX: ``.ix`` was deprecated and then removed from pandas; ``.loc`` is
    # the label-based, end-inclusive equivalent on this default integer index.
    df.loc[3:5, "timestamp"] = pd.Timestamp("2014-01-01 13:45")
    expr = bz.Data(df, name="expr", dshape=self.dshape)
    loader = BlazeLoader(data_query_time=time(8, 45), data_query_tz="EST")
    ds = from_blaze(
        expr,
        loader=loader,
        no_deltas_rule=no_deltas_rules.ignore,
        missing_values=self.missing_values,
    )
    p = Pipeline()
    p.add(ds.value.latest, "value")
    p.add(ds.int_value.latest, "int_value")
    dates = self.dates
    with tmp_asset_finder() as finder:
        result = SimplePipelineEngine(loader, dates, finder).run_pipeline(
            p, dates[0], dates[-1],
        )
        expected = df.drop("asof_date", axis=1)
        expected["timestamp"] = (
            expected["timestamp"]
            .dt.normalize()
            .astype("datetime64[ns]")
            .dt.tz_localize("utc")
        )
        # Rows past the query-time cutoff become visible one session later.
        expected.loc[3:5, "timestamp"] += timedelta(days=1)
        expected.set_index(["timestamp", "sid"], inplace=True)
        expected.index = pd.MultiIndex.from_product(
            (expected.index.levels[0], finder.retrieve_all(expected.index.levels[1]))
        )
        assert_frame_equal(result, expected, check_dtype=False)
示例8: test_id_macro_dataset
def test_id_macro_dataset(self):
    """A macro (sid-less) dataset broadcasts each day's value to every asset."""
    expr = bz.Data(self.macro_df, name='expr', dshape=self.macro_dshape)
    loader = BlazeLoader()
    ds = from_blaze(
        expr,
        loader=loader,
        no_deltas_rule=no_deltas_rules.ignore,
    )
    pipeline = Pipeline()
    pipeline.add(ds.value.latest, 'value')
    dates = self.dates
    asset_info = asset_infos[0][0]
    with tmp_asset_finder(equities=asset_info) as finder:
        result = SimplePipelineEngine(loader, dates, finder).run_pipeline(
            pipeline, dates[0], dates[-1],
        )
        nassets = len(asset_info)
        # Each daily value 0, 1, 2 repeated once per asset.
        values = [v for v in (0, 1, 2) for _ in range(nassets)]
        expected = pd.DataFrame(
            values,
            index=pd.MultiIndex.from_product((
                self.macro_df.timestamp,
                finder.retrieve_all(asset_info.index),
            )),
            columns=('value',),
        )
        assert_frame_equal(result, expected, check_dtype=False)
示例9: test_group_by_type
def test_group_by_type(self):
    """``group_by_type`` buckets a mixed sid query into 'equity'/'future' sets."""
    equities = make_simple_equity_info(
        range(5),
        start_date=pd.Timestamp('2014-01-01'),
        end_date=pd.Timestamp('2015-01-01'),
    )
    futures = make_commodity_future_info(
        first_sid=6,
        root_symbols=['CL'],
        years=[2014],
    )
    # Overlapping sid queries force the finder to serve partially-cached
    # results.
    queries = [
        ([0, 1, 3], [6, 7]),
        ([0, 2, 3], [7, 10]),
        (list(equities.index), list(futures.index)),
    ]
    with tmp_asset_finder(equities=equities, futures=futures) as finder:
        for equity_sids, future_sids in queries:
            grouped = finder.group_by_type(equity_sids + future_sids)
            expected = {
                'equity': set(equity_sids),
                'future': set(future_sids),
            }
            self.assertEqual(grouped, expected)
示例10: test_id
def test_id(self):
    """The identity pipeline echoes per-sid input values.

    Every (timestamp, sid) row of ``self.df`` should appear unchanged in
    the pipeline output, keyed by (date, Equity) instead of raw sids.
    """
    with tmp_asset_finder() as finder:
        expected = self.df.drop('asof_date', axis=1).set_index(
            ['timestamp', 'sid'],
        )
        # Swap raw sids for Equity objects to match the engine's index.
        timestamps, sids = expected.index.levels
        expected.index = pd.MultiIndex.from_product(
            (timestamps, finder.retrieve_all(sids)),
        )
        self._test_id(self.df, self.dshape, expected, finder, ('value',))
示例11: test_id_macro_dataset_multiple_columns
def test_id_macro_dataset_multiple_columns(self):
    """A two-column macro dataset broadcasts both columns across assets.

    ``other`` is derived as ``value + 1``; each daily (value, other) pair
    should be repeated for every asset on that day.
    """
    frame = self.macro_df.copy()
    frame['other'] = frame.value + 1
    fields = OrderedDict(self.macro_dshape.measure.fields)
    fields['other'] = fields['value']
    expr = bz.Data(frame, name='expr', dshape=var * Record(fields))
    loader = BlazeLoader()
    ds = from_blaze(
        expr,
        loader=loader,
        no_deltas_rule=no_deltas_rules.ignore,
    )
    pipeline = Pipeline()
    pipeline.add(ds.value.latest, 'value')
    pipeline.add(ds.other.latest, 'other')
    dates = self.dates
    asset_info = asset_infos[0][0]
    with tmp_asset_finder(equities=asset_info) as finder:
        result = SimplePipelineEngine(loader, dates, finder).run_pipeline(
            pipeline, dates[0], dates[-1],
        )
        # One (value, other) row per day, repeated once per asset.
        rows = np.repeat(np.array([[0, 1], [1, 2], [2, 3]]), 3, axis=0)
        expected = pd.DataFrame(
            rows,
            index=pd.MultiIndex.from_product((
                frame.timestamp,
                finder.retrieve_all(asset_info.index),
            )),
            columns=('value', 'other'),
        ).sort_index(axis=1)
        assert_frame_equal(
            result,
            expected.sort_index(axis=1),
            check_dtype=False,
        )
示例12: test_novel_deltas_macro
def test_novel_deltas_macro(self):
    """Deltas to a macro dataset whose novel timestamps fall on
    non-trading days are applied on the next available session.

    Baseline rows land on 2014-01-01 and 2014-01-04; the deltas add 10 to
    each value and stamp them one day later.  The calendar below omits the
    4th and 5th, so the second delta only becomes visible on 2014-01-06.
    """
    asset_info = asset_infos[0][0]
    base_dates = pd.DatetimeIndex([
        pd.Timestamp('2014-01-01'),
        pd.Timestamp('2014-01-04')
    ])
    baseline = pd.DataFrame({
        'value': (0, 1),
        'asof_date': base_dates,
        'timestamp': base_dates,
    })
    expr = bz.Data(baseline, name='expr', dshape=self.macro_dshape)
    deltas = bz.Data(baseline, name='deltas', dshape=self.macro_dshape)
    # Shift each delta's value by +10 and its timestamp forward one day.
    deltas = bz.transform(
        deltas,
        value=deltas.value + 10,
        timestamp=deltas.timestamp + timedelta(days=1),
    )
    nassets = len(asset_info)
    # Expected trailing windows keyed by compute date; macro data has no
    # sid dimension, so each row is repeated across all assets.
    expected_views = keymap(pd.Timestamp, {
        '2014-01-03': repeat_last_axis(
            np.array([10.0, 10.0, 10.0]),
            nassets,
        ),
        '2014-01-06': repeat_last_axis(
            np.array([10.0, 10.0, 11.0]),
            nassets,
        ),
    })
    cal = pd.DatetimeIndex([
        pd.Timestamp('2014-01-01'),
        pd.Timestamp('2014-01-02'),
        pd.Timestamp('2014-01-03'),
        # omitting the 4th and 5th to simulate a weekend
        pd.Timestamp('2014-01-06'),
    ])
    with tmp_asset_finder(equities=asset_info) as finder:
        # Final pipeline output: the last row of each expected window,
        # broadcast across assets (see compute_fn below).
        expected_output = pd.DataFrame(
            list(concatv([10] * nassets, [11] * nassets)),
            index=pd.MultiIndex.from_product((
                sorted(expected_views.keys()),
                finder.retrieve_all(asset_info.index),
            )),
            columns=('value',),
        )
        self._run_pipeline(
            expr,
            deltas,
            expected_views,
            expected_output,
            finder,
            calendar=cal,
            start=cal[2],
            end=cal[-1],
            window_length=3,
            # Emit the last row of each rolling window.
            compute_fn=op.itemgetter(-1),
        )
示例13: setUpClass
def setUpClass(cls):
    """Build the shared calendar, assets, finder, and lifetimes mask."""
    # One trading-day calendar spanning 2014 and 19 synthetic asset sids.
    cls.__calendar = date_range("2014", "2015", freq=trading_day)
    cls.__assets = assets = Int64Index(arange(1, 20))
    # The context manager is entered here and presumably exited in a
    # tearDownClass outside this excerpt — TODO confirm.
    cls.__tmp_finder_ctx = tmp_asset_finder(
        equities=make_simple_equity_info(assets, cls.__calendar[0], cls.__calendar[-1])
    )
    cls.__finder = cls.__tmp_finder_ctx.__enter__()
    # Existence mask over the last 30 sessions, excluding each asset's
    # start date.
    cls.__mask = cls.__finder.lifetimes(cls.__calendar[-30:], include_start_date=False)
示例14: test_novel_deltas
def test_novel_deltas(self, asset_info):
    """Deltas to a per-sid dataset whose novel timestamps fall on
    non-trading days are applied on the next available session.

    Baseline rows exist for three sids on 2014-01-01 and 2014-01-04; the
    deltas add 10 to each value and stamp them one day later.  The
    calendar omits the 4th and 5th, so the second batch of deltas only
    becomes visible on 2014-01-06.
    """
    base_dates = pd.DatetimeIndex([pd.Timestamp("2014-01-01"), pd.Timestamp("2014-01-04")])
    # One row per sid per base date.
    repeated_dates = base_dates.repeat(3)
    baseline = pd.DataFrame(
        {
            "sid": self.sids * 2,
            "value": (0, 1, 2, 1, 2, 3),
            "asof_date": repeated_dates,
            "timestamp": repeated_dates,
        }
    )
    expr = bz.Data(baseline, name="expr", dshape=self.dshape)
    deltas = bz.Data(baseline, name="deltas", dshape=self.dshape)
    # Shift each delta's value by +10 and its timestamp forward one day.
    deltas = bz.transform(deltas, value=deltas.value + 10, timestamp=deltas.timestamp + timedelta(days=1))
    # Expected trailing windows keyed by compute date: rows are dates,
    # columns are sids.
    expected_views = keymap(
        pd.Timestamp,
        {
            "2014-01-03": np.array([[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [10.0, 11.0, 12.0]]),
            "2014-01-06": np.array([[10.0, 11.0, 12.0], [10.0, 11.0, 12.0], [11.0, 12.0, 13.0]]),
        },
    )
    # When asset_info carries a fourth asset with no data, pad the expected
    # windows and output with NaN for it.
    if len(asset_info) == 4:
        expected_views = valmap(lambda view: np.c_[view, [np.nan, np.nan, np.nan]], expected_views)
        expected_output_buffer = [10, 11, 12, np.nan, 11, 12, 13, np.nan]
    else:
        expected_output_buffer = [10, 11, 12, 11, 12, 13]
    cal = pd.DatetimeIndex(
        [
            pd.Timestamp("2014-01-01"),
            pd.Timestamp("2014-01-02"),
            pd.Timestamp("2014-01-03"),
            # omitting the 4th and 5th to simulate a weekend
            pd.Timestamp("2014-01-06"),
        ]
    )
    with tmp_asset_finder(equities=asset_info) as finder:
        expected_output = pd.DataFrame(
            expected_output_buffer,
            index=pd.MultiIndex.from_product(
                (sorted(expected_views.keys()), finder.retrieve_all(asset_info.index))
            ),
            columns=("value",),
        )
        self._run_pipeline(
            expr,
            deltas,
            expected_views,
            expected_output,
            finder,
            calendar=cal,
            start=cal[2],
            end=cal[-1],
            window_length=3,
            # Emit the last row of each rolling window.
            compute_fn=op.itemgetter(-1),
        )
示例15: setUpClass
def setUpClass(cls):
    """Build the shared finder and buyback-authorization test fixtures."""
    stack = ExitStack()
    # Presumably closed in a tearDownClass outside this excerpt — confirm.
    cls._cleanup_stack = stack
    cls.finder = stack.enter_context(
        tmp_asset_finder(equities=cls.equity_info),
    )
    cls.cols = {}
    cls.dataset = create_buyback_auth_tst_frame(
        cls.event_dates_cases,
        SHARE_COUNT_FIELD_NAME,
    )
    cls.loader_type = CashBuybackAuthorizationsLoader