本文整理汇总了Python中zipline.assets.synthetic.make_simple_equity_info函数的典型用法代码示例。如果您正苦于以下问题:Python make_simple_equity_info函数的具体用法?Python make_simple_equity_info怎么用?Python make_simple_equity_info使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了make_simple_equity_info函数的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_group_by_type
def test_group_by_type(self):
    """``group_by_type`` buckets a mixed sid query into equities and futures."""
    equity_frame = make_simple_equity_info(
        range(5),
        start_date=pd.Timestamp('2014-01-01'),
        end_date=pd.Timestamp('2015-01-01'),
    )
    future_frame = make_commodity_future_info(
        first_sid=6,
        root_symbols=['CL'],
        years=[2014],
    )
    self.write_assets(
        equities=equity_frame,
        futures=future_frame,
    )
    finder = self.asset_finder
    # Intersecting sid queries, to exercise loading of partially-cached
    # results.
    cases = [
        ([0, 1, 3], [6, 7]),
        ([0, 2, 3], [7, 10]),
        (list(equity_frame.index), list(future_frame.index)),
    ]
    for eq_sids, fut_sids in cases:
        grouped = finder.group_by_type(eq_sids + fut_sids)
        expected = {'equity': set(eq_sids), 'future': set(fut_sids)}
        self.assertEqual(grouped, expected)
示例2: temp_pipeline_engine
from contextlib import contextmanager

@contextmanager
def temp_pipeline_engine(calendar, sids, random_seed, symbols=None):
    """
    A contextmanager that yields a SimplePipelineEngine holding a reference to
    an AssetFinder generated via tmp_asset_finder.

    Parameters
    ----------
    calendar : pd.DatetimeIndex
        Calendar to pass to the constructed PipelineEngine.
    sids : iterable[int]
        Sids to use for the temp asset finder.
    random_seed : int
        Integer used to seed instances of SeededRandomLoader.
    symbols : iterable[str], optional
        Symbols for constructed assets. Forwarded to make_simple_equity_info.
    """
    # Assets live for the full span of the supplied calendar.
    equity_info = make_simple_equity_info(
        sids=sids,
        start_date=calendar[0],
        end_date=calendar[-1],
        symbols=symbols,
    )
    loader = make_seeded_random_loader(random_seed, calendar, sids)

    def get_loader(column):
        # Every pipeline column is served by the single seeded random loader.
        return loader

    with tmp_asset_finder(equities=equity_info) as finder:
        yield SimplePipelineEngine(get_loader, calendar, finder)
示例3: test_sids
def test_sids(self):
    """The AssetFinder ``sids`` property reflects every equity written to it."""
    frame = make_simple_equity_info(
        [0, 1, 2],
        pd.Timestamp('2014-01-01'),
        pd.Timestamp('2014-01-02'),
    )
    self.write_assets(equities=frame)
    self.assertEqual({0, 1, 2}, set(self.asset_finder.sids))
示例4: __init__
def __init__(self, equities=_default_equities, **frames):
    """Collect asset frames, defaulting equities to a minimal A/B/C universe.

    ``_default_equities`` is a sentinel (identity-compared below) so callers
    can still pass any real frame, including an empty one.
    """
    if equities is self._default_equities:
        equities = make_simple_equity_info(
            list(map(ord, "ABC")),
            pd.Timestamp(0),
            pd.Timestamp("2015"),
        )
    frames["equities"] = equities
    self._frames = frames
    # Dropped the redundant earlier `self._eng = None`; this single
    # assignment is the one the enter/exit protocol relies on.
    self._eng = None  # set in enter and exit
示例5: init_class_fixtures
def init_class_fixtures(cls):
    """Build the class-wide calendar, assets, finder, and lifetimes mask."""
    super(BasePipelineTestCase, cls).init_class_fixtures()
    cls.__calendar = date_range("2014", "2015", freq=cls.trading_calendar.day)
    cls.__assets = assets = Int64Index(arange(1, 20))
    # The context manager is kept on the class so it can be exited later —
    # presumably in a matching teardown hook; confirm against the full class.
    cls.__tmp_finder_ctx = tmp_asset_finder(
        equities=make_simple_equity_info(assets, cls.__calendar[0], cls.__calendar[-1])
    )
    cls.__finder = cls.__tmp_finder_ctx.__enter__()
    # Asset lifetimes over the last 30 calendar entries, used as a mask.
    cls.__mask = cls.__finder.lifetimes(cls.__calendar[-30:], include_start_date=False)
示例6: __init__
def __init__(self, equities=_default_equities, **frames):
    """Collect asset frames, defaulting equities to a minimal A/B/C universe.

    ``_default_equities`` is a sentinel (identity-compared below) so callers
    can still pass any real frame, including an empty one.
    """
    if equities is self._default_equities:
        equities = make_simple_equity_info(
            list(map(ord, 'ABC')),
            pd.Timestamp(0),
            pd.Timestamp('2015'),
        )
    frames['equities'] = equities
    self._frames = frames
    # Dropped the redundant earlier `self._eng = None`; this single
    # assignment is the one the enter/exit protocol relies on.
    self._eng = None  # set in enter and exit
示例7: test_retrieve_all
def test_retrieve_all(self):
    """``retrieve_all`` preserves query order and returns correctly-typed assets."""
    equities = make_simple_equity_info(
        range(5),
        start_date=pd.Timestamp('2014-01-01'),
        end_date=pd.Timestamp('2015-01-01'),
    )
    max_equity = equities.index.max()
    # Futures start right after the last equity sid, so sid magnitude alone
    # distinguishes the two asset types in the partitioning below.
    futures = make_commodity_future_info(
        first_sid=max_equity + 1,
        root_symbols=['CL'],
        years=[2014],
    )
    self.write_assets(
        equities=equities,
        futures=futures,
    )
    finder = self.asset_finder
    all_sids = finder.sids
    self.assertEqual(len(all_sids), len(equities) + len(futures))
    # Each query is deliberately a different mix of cached/uncached sids;
    # repeated entries exercise the cache-hit path.  Do not deduplicate.
    queries = [
        # Empty Query.
        (),
        # Only Equities.
        tuple(equities.index[:2]),
        # Only Futures.
        tuple(futures.index[:3]),
        # Mixed, all cache misses.
        tuple(equities.index[2:]) + tuple(futures.index[3:]),
        # Mixed, all cache hits.
        tuple(equities.index[2:]) + tuple(futures.index[3:]),
        # Everything.
        all_sids,
        all_sids,
    ]
    for sids in queries:
        equity_sids = [i for i in sids if i <= max_equity]
        future_sids = [i for i in sids if i > max_equity]
        results = finder.retrieve_all(sids)
        # Results must come back in the same order as the query.
        self.assertEqual(sids, tuple(map(int, results)))
        # Equities sort before futures in every query above, so the expected
        # type sequence is simply equities-then-futures.
        self.assertEqual(
            [Equity for _ in equity_sids] +
            [Future for _ in future_sids],
            list(map(type, results)),
        )
        self.assertEqual(
            (
                list(equities.symbol.loc[equity_sids]) +
                list(futures.symbol.loc[future_sids])
            ),
            list(asset.symbol for asset in results),
        )
示例8: setUpClass
def setUpClass(cls):
    """Build a class-wide calendar, asset finder, and lifetimes mask."""
    cls.__calendar = date_range('2014', '2015', freq=trading_day)
    cls.__assets = assets = Int64Index(arange(1, 20))
    # The context manager is entered manually and kept on the class —
    # presumably __exit__-ed in tearDownClass; confirm against the full class.
    cls.__tmp_finder_ctx = tmp_asset_finder(
        equities=make_simple_equity_info(
            assets,
            cls.__calendar[0],
            cls.__calendar[-1],
        )
    )
    cls.__finder = cls.__tmp_finder_ctx.__enter__()
    # Asset lifetimes over the last 30 calendar entries, used as a mask.
    cls.__mask = cls.__finder.lifetimes(
        cls.__calendar[-30:],
        include_start_date=False,
    )
示例9: test_retrieve_specific_type
def test_retrieve_specific_type(self, type_, lookup_name, failure_type):
    """Type-specific lookups return only matching assets and reject the rest.

    Parameters
    ----------
    type_ : type
        Asset class expected to succeed (compared against ``Equity`` below).
    lookup_name : str
        Name of the finder method to exercise, resolved via ``getattr``.
    failure_type : type
        Exception expected when sids of the other asset type are requested.
    """
    equities = make_simple_equity_info(
        range(5),
        start_date=pd.Timestamp('2014-01-01'),
        end_date=pd.Timestamp('2015-01-01'),
    )
    max_equity = equities.index.max()
    # Futures sids start right after the last equity sid.
    futures = make_commodity_future_info(
        first_sid=max_equity + 1,
        root_symbols=['CL'],
        years=[2014],
    )
    equity_sids = [0, 1]
    future_sids = [max_equity + 1, max_equity + 2, max_equity + 3]
    if type_ == Equity:
        success_sids = equity_sids
        fail_sids = future_sids
    else:
        fail_sids = equity_sids
        success_sids = future_sids
    self.write_assets(
        equities=equities,
        futures=futures,
    )
    finder = self.asset_finder
    # Run twice to exercise caching.
    lookup = getattr(finder, lookup_name)
    for _ in range(2):
        results = lookup(success_sids)
        self.assertIsInstance(results, dict)
        self.assertEqual(set(results.keys()), set(success_sids))
        # Each sid maps to an asset whose int value is the sid itself.
        self.assertEqual(
            valmap(int, results),
            dict(zip(success_sids, success_sids)),
        )
        self.assertEqual(
            {type_},
            {type(asset) for asset in itervalues(results)},
        )
        with self.assertRaises(failure_type):
            lookup(fail_sids)
        with self.assertRaises(failure_type):
            # Should fail if **any** of the assets are bad.
            lookup([success_sids[0], fail_sids[0]])
示例10: bundle_ingest_create_writers
def bundle_ingest_create_writers(
    environ,
    asset_db_writer,
    minute_bar_writer,
    daily_bar_writer,
    adjustment_writer,
    calendar,
    start_session,
    end_session,
    cache,
    show_progress,
    output_dir,
):
    """Bundle-ingest callback asserting that every writer is provided.

    NOTE(review): reads ``self`` and ``called`` from an enclosing scope, so
    this must be defined inside a test method — confirm against the caller.
    The signature matches the bundle-ingest callback interface and must not
    be changed.
    """
    self.assertIsNotNone(asset_db_writer)
    self.assertIsNotNone(minute_bar_writer)
    self.assertIsNotNone(daily_bar_writer)
    self.assertIsNotNone(adjustment_writer)
    equities = make_simple_equity_info(tuple(range(3)), self.START_DATE, self.END_DATE)
    asset_db_writer.write(equities=equities)
    # Signal to the enclosing test that this ingest function actually ran.
    called[0] = True
示例11: test_ingest
def test_ingest(self):
    """End-to-end bundle ingest: write bars and splits, reload, and verify.

    NOTE(review): this example was truncated by the page it was scraped
    from; the trailing adjustment checks are missing.
    """
    start = pd.Timestamp('2014-01-06', tz='utc')
    end = pd.Timestamp('2014-01-10', tz='utc')
    calendar = get_calendar('NYSE')
    sessions = calendar.sessions_in_range(start, end)
    minutes = calendar.minutes_for_sessions_in_range(start, end)
    sids = tuple(range(3))
    equities = make_simple_equity_info(
        sids,
        start,
        end,
    )
    daily_bar_data = make_bar_data(equities, sessions)
    minute_bar_data = make_bar_data(equities, minutes)
    first_split_ratio = 0.5
    second_split_ratio = 0.1
    splits = pd.DataFrame.from_records([
        {
            'effective_date': str_to_seconds('2014-01-08'),
            'ratio': first_split_ratio,
            'sid': 0,
        },
        {
            'effective_date': str_to_seconds('2014-01-09'),
            'ratio': second_split_ratio,
            'sid': 1,
        },
    ])

    @self.register(
        'bundle',
        calendar=calendar,
        start_session=start,
        end_session=end,
    )
    def bundle_ingest(environ,
                      asset_db_writer,
                      minute_bar_writer,
                      daily_bar_writer,
                      adjustment_writer,
                      calendar,
                      start_session,
                      end_session,
                      cache,
                      show_progress,
                      output_dir):
        # Writes the fixture data through each writer and checks the
        # framework-supplied arguments.
        assert_is(environ, self.environ)
        asset_db_writer.write(equities=equities)
        minute_bar_writer.write(minute_bar_data)
        daily_bar_writer.write(daily_bar_data)
        adjustment_writer.write(splits=splits)
        assert_is_instance(calendar, TradingCalendar)
        assert_is_instance(cache, dataframe_cache)
        assert_is_instance(show_progress, bool)

    self.ingest('bundle', environ=self.environ)
    bundle = self.load('bundle', environ=self.environ)
    assert_equal(set(bundle.asset_finder.sids), set(sids))
    columns = 'open', 'high', 'low', 'close', 'volume'
    actual = bundle.equity_minute_bar_reader.load_raw_arrays(
        columns,
        minutes[0],
        minutes[-1],
        sids,
    )
    for actual_column, colname in zip(actual, columns):
        assert_equal(
            actual_column,
            expected_bar_values_2d(minutes, equities, colname),
            msg=colname,
        )
    actual = bundle.equity_daily_bar_reader.load_raw_arrays(
        columns,
        start,
        end,
        sids,
    )
    for actual_column, colname in zip(actual, columns):
        assert_equal(
            actual_column,
            expected_bar_values_2d(sessions, equities, colname),
            msg=colname,
        )
    adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
        columns,
        sessions,
        pd.Index(sids),
    )
    for column, adjustments in zip(columns, adjustments_for_cols[:-1]):
        # iterate over all the adjustments but `volume`
        # ......... remainder of the original example omitted .........
示例12: transaction_sim
def transaction_sim(self, **params):
    """This is a utility method that asserts expected
    results for conversion of orders to transactions given a
    trade history.

    NOTE(review): this example was truncated by the page it was scraped
    from; the order/transaction assertions at the end are missing.
    """
    trade_count = params["trade_count"]
    trade_interval = params["trade_interval"]
    order_count = params["order_count"]
    order_amount = params["order_amount"]
    order_interval = params["order_interval"]
    expected_txn_count = params["expected_txn_count"]
    expected_txn_volume = params["expected_txn_volume"]
    # optional parameters
    # ---------------------
    # if present, alternate between long and short sales
    alternate = params.get("alternate")
    # if present, expect transaction amounts to match orders exactly.
    complete_fill = params.get("complete_fill")
    sid = 1
    metadata = make_simple_equity_info([sid], self.start, self.end)
    with TempDirectory() as tempdir, tmp_trading_env(equities=metadata) as env:
        if trade_interval < timedelta(days=1):
            # Sub-daily trades: build a minute-frequency simulation with a
            # flat 10.1 price series and constant 100-share volume.
            sim_params = factory.create_simulation_parameters(
                start=self.start, end=self.end, data_frequency="minute"
            )
            # 100 extra minutes of padding beyond the trades themselves.
            minutes = env.market_minute_window(
                sim_params.first_open, int((trade_interval.total_seconds() / 60) * trade_count) + 100
            )
            price_data = np.array([10.1] * len(minutes))
            assets = {
                sid: pd.DataFrame(
                    {
                        "open": price_data,
                        "high": price_data,
                        "low": price_data,
                        "close": price_data,
                        "volume": np.array([100] * len(minutes)),
                        "dt": minutes,
                    }
                ).set_index("dt")
            }
            write_bcolz_minute_data(
                env, env.days_in_range(minutes[0], minutes[-1]), tempdir.path, iteritems(assets)
            )
            equity_minute_reader = BcolzMinuteBarReader(tempdir.path)
            data_portal = DataPortal(
                env,
                first_trading_day=equity_minute_reader.first_trading_day,
                equity_minute_reader=equity_minute_reader,
            )
        else:
            # Daily trades: same flat price/volume series at daily frequency.
            sim_params = factory.create_simulation_parameters(data_frequency="daily")
            days = sim_params.trading_days
            assets = {
                1: pd.DataFrame(
                    {
                        "open": [10.1] * len(days),
                        "high": [10.1] * len(days),
                        "low": [10.1] * len(days),
                        "close": [10.1] * len(days),
                        "volume": [100] * len(days),
                        "day": [day.value for day in days],
                    },
                    index=days,
                )
            }
            path = os.path.join(tempdir.path, "testdata.bcolz")
            BcolzDailyBarWriter(path, days).write(assets.items())
            equity_daily_reader = BcolzDailyBarReader(path)
            data_portal = DataPortal(
                env,
                first_trading_day=equity_daily_reader.first_trading_day,
                equity_daily_reader=equity_daily_reader,
            )
        # FixedSlippage unless the caller explicitly asked for the default.
        if "default_slippage" not in params or not params["default_slippage"]:
            slippage_func = FixedSlippage()
        else:
            slippage_func = None
        blotter = Blotter(sim_params.data_frequency, self.env.asset_finder, slippage_func)
        start_date = sim_params.first_open
        if alternate:
            alternator = -1
            # ......... remainder of the original example omitted .........
示例13: test_ingest
def test_ingest(self):
    """End-to-end bundle ingest against the class-level NYSE date range:
    write bars and splits, reload the bundle, and verify prices and
    adjustments round-trip correctly.
    """
    calendar = get_calendar("NYSE")
    sessions = calendar.sessions_in_range(self.START_DATE, self.END_DATE)
    minutes = calendar.minutes_for_sessions_in_range(self.START_DATE, self.END_DATE)
    sids = tuple(range(3))
    equities = make_simple_equity_info(sids, self.START_DATE, self.END_DATE)
    daily_bar_data = make_bar_data(equities, sessions)
    minute_bar_data = make_bar_data(equities, minutes)
    first_split_ratio = 0.5
    second_split_ratio = 0.1
    splits = pd.DataFrame.from_records(
        [
            {"effective_date": str_to_seconds("2014-01-08"), "ratio": first_split_ratio, "sid": 0},
            {"effective_date": str_to_seconds("2014-01-09"), "ratio": second_split_ratio, "sid": 1},
        ]
    )

    @self.register("bundle", calendar_name="NYSE", start_session=self.START_DATE, end_session=self.END_DATE)
    def bundle_ingest(
        environ,
        asset_db_writer,
        minute_bar_writer,
        daily_bar_writer,
        adjustment_writer,
        calendar,
        start_session,
        end_session,
        cache,
        show_progress,
        output_dir,
    ):
        # Writes the fixture data through each writer and checks the
        # framework-supplied arguments.
        assert_is(environ, self.environ)
        asset_db_writer.write(equities=equities)
        minute_bar_writer.write(minute_bar_data)
        daily_bar_writer.write(daily_bar_data)
        adjustment_writer.write(splits=splits)
        assert_is_instance(calendar, TradingCalendar)
        assert_is_instance(cache, dataframe_cache)
        assert_is_instance(show_progress, bool)

    self.ingest("bundle", environ=self.environ)
    bundle = self.load("bundle", environ=self.environ)
    assert_equal(set(bundle.asset_finder.sids), set(sids))
    columns = "open", "high", "low", "close", "volume"
    # Minute bars round-trip.
    actual = bundle.equity_minute_bar_reader.load_raw_arrays(columns, minutes[0], minutes[-1], sids)
    for actual_column, colname in zip(actual, columns):
        assert_equal(actual_column, expected_bar_values_2d(minutes, equities, colname), msg=colname)
    # Daily bars round-trip.
    actual = bundle.equity_daily_bar_reader.load_raw_arrays(columns, self.START_DATE, self.END_DATE, sids)
    for actual_column, colname in zip(actual, columns):
        assert_equal(actual_column, expected_bar_values_2d(sessions, equities, colname), msg=colname)
    adjustments_for_cols = bundle.adjustment_reader.load_adjustments(columns, sessions, pd.Index(sids))
    # Price columns scale by the split ratio...
    for column, adjustments in zip(columns, adjustments_for_cols[:-1]):
        # iterate over all the adjustments but `volume`
        assert_equal(
            adjustments,
            {
                2: [Float64Multiply(first_row=0, last_row=2, first_col=0, last_col=0, value=first_split_ratio)],
                3: [Float64Multiply(first_row=0, last_row=3, first_col=1, last_col=1, value=second_split_ratio)],
            },
            msg=column,
        )
    # check the volume, the value should be 1/ratio
    assert_equal(
        adjustments_for_cols[-1],
        {
            2: [Float64Multiply(first_row=0, last_row=2, first_col=0, last_col=0, value=1 / first_split_ratio)],
            3: [Float64Multiply(first_row=0, last_row=3, first_col=1, last_col=1, value=1 / second_split_ratio)],
        },
        msg="volume",
    )
示例14: import
NonPipelineField,
no_deltas_rules,
)
from zipline.utils.numpy_utils import (
float64_dtype,
int64_dtype,
repeat_last_axis,
)
from zipline.testing import tmp_asset_finder
# Key functions for pulling the name/dtype attribute off objects.
nameof = op.attrgetter('name')
dtypeof = op.attrgetter('dtype')
# Two equity universes: a baseline ('ABC') and one with an extra sid ('ABCD'),
# used to parameterize tests over the presence of an additional asset.
# Sids are the ordinals of the symbol characters.
asset_infos = (
    (make_simple_equity_info(
        tuple(map(ord, 'ABC')),
        pd.Timestamp(0),
        pd.Timestamp('2015'),
    ),),
    (make_simple_equity_info(
        tuple(map(ord, 'ABCD')),
        pd.Timestamp(0),
        pd.Timestamp('2015'),
    ),),
)
# Decorators: run a test once per universe, and once per (universe, flag) pair.
with_extra_sid = parameterized.expand(asset_infos)
with_ignore_sid = parameterized.expand(
    product(chain.from_iterable(asset_infos), [True, False])
)
def _utc_localize_index_level_0(df):