本文整理匯總了Python中pandas.to_pickle方法的典型用法代碼示例。如果您正苦於以下問題:Python pandas.to_pickle方法的具體用法?Python pandas.to_pickle怎麽用?Python pandas.to_pickle使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類pandas
的用法示例。
在下文中一共展示了pandas.to_pickle方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: test_write_explicit
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_write_explicit(self, compression, get_random_path):
    """Write a compressed pickle, decompress it by hand, and verify the
    raw file reads back as the original frame."""
    compressed_path = get_random_path + ".compressed"
    raw_path = get_random_path + ".raw"

    with tm.ensure_clean(compressed_path) as cpath, tm.ensure_clean(raw_path) as rpath:
        frame = tm.makeDataFrame()

        # Write the frame as a compressed pickle.
        frame.to_pickle(cpath, compression=compression)

        # Manually decompress the pickle into the raw file.
        with tm.decompress_file(cpath, compression=compression) as src:
            with open(rpath, "wb") as dst:
                dst.write(src.read())

        # Reading the decompressed file with compression disabled must
        # reproduce the original frame exactly.
        roundtripped = pd.read_pickle(rpath, compression=None)
        tm.assert_frame_equal(frame, roundtripped)
示例2: test_read_explicit
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_read_explicit(self, compression, get_random_path):
    """Write an uncompressed pickle, compress it by hand, and verify
    ``read_pickle`` with an explicit compression reads it back."""
    raw_path = get_random_path + ".raw"
    compressed_path = get_random_path + ".compressed"

    with tm.ensure_clean(raw_path) as rpath, tm.ensure_clean(compressed_path) as cpath:
        frame = tm.makeDataFrame()

        # Write the frame without any compression.
        frame.to_pickle(rpath, compression=None)

        # Compress the raw pickle with the fixture helper.
        self.compress_file(rpath, cpath, compression=compression)

        # Explicitly naming the compression must round-trip the frame.
        roundtripped = pd.read_pickle(cpath, compression=compression)
        tm.assert_frame_equal(frame, roundtripped)
示例3: round_trip_pickle
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def round_trip_pickle(obj, path=None):
    """
    Pickle an object and then read it again.

    Parameters
    ----------
    obj : pandas object
        The object to pickle and then re-read.
    path : str, default None
        The path where the pickled object is written and then read.

    Returns
    -------
    round_trip_pickled_object : pandas object
        The original object that was pickled and then re-read.
    """
    target = path
    if target is None:
        # Random file name so concurrent test runs cannot collide.
        target = u('__{random_bytes}__.pickle'.format(random_bytes=rands(10)))
    with ensure_clean(target) as tmp_path:
        pd.to_pickle(obj, tmp_path)
        return pd.read_pickle(tmp_path)
示例4: _handle_exception
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def _handle_exception(statements, database, exception_info):
    """Salvage unsaved statement payloads after a failed database write.

    Each Insert/Update statement's compiled parameters are pickled into a
    timestamped file next to the database (falling back to the current
    directory), and a warning with the original traceback is emitted.
    """
    # Drop the "sqlite:///" prefix (10 chars) from the bind URL to get a path.
    directory = Path(str(database.bind.url)[10:])
    if not directory.is_dir():
        directory = Path(".")
    directory = directory.resolve()

    for statement in statements:
        if not isinstance(
            statement, (sqlalchemy.sql.dml.Insert, sqlalchemy.sql.dml.Update)
        ):
            continue
        values = statement.compile().params
        # Microsecond-resolution timestamp keeps successive dumps distinct.
        timestamp = dt.datetime.now().strftime("%Y%m%d-%H%M%S-%f")
        filename = f"{statement.table.name}_{timestamp}.pickle"
        pd.to_pickle(values, directory / filename)

    warnings.warn(
        f"Unable to write to database. The data was saved in {directory} instead. The "
        f"traceback was:\n\n{exception_info}"
    )
示例5: test_write_explicit
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_write_explicit(self, compression, get_random_path):
    """Write a compressed pickle, decompress via the fixture helper, and
    verify the raw file reads back as the original frame (GH 11666)."""
    # issue 11666: xz requires the lzma module, which may be missing.
    if compression == 'xz':
        tm._skip_if_no_lzma()

    compressed_path = get_random_path + ".compressed"
    raw_path = get_random_path + ".raw"

    with tm.ensure_clean(compressed_path) as cpath, tm.ensure_clean(raw_path) as rpath:
        frame = tm.makeDataFrame()

        # Write the frame as a compressed pickle.
        frame.to_pickle(cpath, compression=compression)

        # Decompress the pickle into the raw file.
        self.decompress_file(cpath, rpath, compression=compression)

        # Reading without compression must reproduce the frame.
        roundtripped = pd.read_pickle(rpath, compression=None)
        tm.assert_frame_equal(frame, roundtripped)
示例6: test_read_explicit
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_read_explicit(self, compression, get_random_path):
    """Write an uncompressed pickle, compress via the fixture helper, and
    verify explicit-compression reads round-trip (GH 11666)."""
    # issue 11666: xz requires the lzma module, which may be missing.
    if compression == 'xz':
        tm._skip_if_no_lzma()

    raw_path = get_random_path + ".raw"
    compressed_path = get_random_path + ".compressed"

    with tm.ensure_clean(raw_path) as rpath, tm.ensure_clean(compressed_path) as cpath:
        frame = tm.makeDataFrame()

        # Write the frame without any compression.
        frame.to_pickle(rpath, compression=None)

        # Compress the raw pickle with the fixture helper.
        self.compress_file(rpath, cpath, compression=compression)

        # Explicitly naming the compression must round-trip the frame.
        roundtripped = pd.read_pickle(cpath, compression=compression)
        tm.assert_frame_equal(frame, roundtripped)
示例7: test_to_pickle
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_to_pickle():
    """Modin and pandas pickles must be byte-identical, both via the
    DataFrame method and via the module-level ``to_pickle`` function."""
    modin_df = create_test_modin_dataframe()
    pandas_df = create_test_pandas_dataframe()

    TEST_PICKLE_DF_FILENAME = "test_df.pkl"
    TEST_PICKLE_pandas_FILENAME = "test_pandas.pkl"

    # Method form: DataFrame.to_pickle on both libraries.
    modin_df.to_pickle(TEST_PICKLE_DF_FILENAME)
    pandas_df.to_pickle(TEST_PICKLE_pandas_FILENAME)
    assert assert_files_eq(TEST_PICKLE_DF_FILENAME, TEST_PICKLE_pandas_FILENAME)
    teardown_test_file(TEST_PICKLE_pandas_FILENAME)
    teardown_test_file(TEST_PICKLE_DF_FILENAME)

    # Function form: module-level to_pickle on both libraries.
    pd.to_pickle(modin_df, TEST_PICKLE_DF_FILENAME)
    pandas.to_pickle(pandas_df, TEST_PICKLE_pandas_FILENAME)
    assert assert_files_eq(TEST_PICKLE_DF_FILENAME, TEST_PICKLE_pandas_FILENAME)
    teardown_test_file(TEST_PICKLE_pandas_FILENAME)
    teardown_test_file(TEST_PICKLE_DF_FILENAME)
示例8: test_pickle_path_pathlib
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_pickle_path_pathlib():
    """A DataFrame must survive pickling through a ``pathlib.Path`` target."""
    expected = tm.makeDataFrame()
    restored = tm.round_trip_pathlib(expected.to_pickle, pd.read_pickle)
    tm.assert_frame_equal(expected, restored)
示例9: test_pickle_path_localpath
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_pickle_path_localpath():
    """A DataFrame must survive pickling through a py.path.local target."""
    expected = tm.makeDataFrame()
    restored = tm.round_trip_localpath(expected.to_pickle, pd.read_pickle)
    tm.assert_frame_equal(expected, restored)
# ---------------------
# test pickle compression
# ---------------------
示例10: test_write_explicit_bad
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_write_explicit_bad(self, compression, get_random_path):
    """Writing with an unknown compression name must raise ValueError."""
    with pytest.raises(ValueError, match="Unrecognized compression type"):
        with tm.ensure_clean(get_random_path) as tmp_path:
            tm.makeDataFrame().to_pickle(tmp_path, compression=compression)
示例11: test_read_infer
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_read_infer(self, ext, get_random_path):
    """``read_pickle`` must infer the compression from the file extension."""
    raw_path = get_random_path + ".raw"
    compressed_path = get_random_path + ext

    # Reverse-map the extension to its compression name (None if unmapped).
    mapping = self._compression_to_extension
    compression = next(
        (name for name, suffix in mapping.items() if suffix == ext), None
    )

    with tm.ensure_clean(raw_path) as rpath, tm.ensure_clean(compressed_path) as cpath:
        frame = tm.makeDataFrame()

        # Write the frame without any compression.
        frame.to_pickle(rpath, compression=None)

        # Compress the raw pickle into the extension-named file.
        self.compress_file(rpath, cpath, compression=compression)

        # No compression argument: it must be inferred from the extension.
        roundtripped = pd.read_pickle(cpath)
        tm.assert_frame_equal(frame, roundtripped)
# ---------------------
# test pickle compression
# ---------------------
示例12: test_read
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_read(self, protocol, get_random_path):
    """A frame written with an explicit pickle protocol must read back intact."""
    with tm.ensure_clean(get_random_path) as tmp_path:
        frame = tm.makeDataFrame()
        frame.to_pickle(tmp_path, protocol=protocol)
        restored = pd.read_pickle(tmp_path)
        tm.assert_frame_equal(frame, restored)
示例13: test_read_bad_versions
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def test_read_bad_versions(self, protocol, get_random_path):
    """Requesting a pickle protocol above HIGHEST_PROTOCOL must raise."""
    # For Python 2, HIGHEST_PROTOCOL should be 2.
    msg = ("pickle protocol {protocol} asked for; the highest available "
           "protocol is 2").format(protocol=protocol)
    with pytest.raises(ValueError, match=msg):
        with tm.ensure_clean(get_random_path) as tmp_path:
            tm.makeDataFrame().to_pickle(tmp_path, protocol=protocol)
示例14: serialize
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def serialize(cls, formatted_data, fh):
    """Pickle ``formatted_data`` into the open file handle ``fh``.

    compat: pandas <= 0.20.3 cannot write a pickle to a file handle, so on
    those versions the handle is closed and its path is used instead.
    """
    target = fh
    if LooseVersion(pandas.__version__) <= LooseVersion('0.20.3'):
        fh.close()
        target = fh.name
    return pandas.to_pickle(formatted_data, target, compression=None)
示例15: create_context_graph
# 需要導入模塊: import pandas [as 別名]
# 或者: from pandas import to_pickle [as 別名]
def create_context_graph(self, max_num_layers, workers=1, verbose=0,):
    """Build the layered context graph for structural random walks.

    Computes pairwise structural distances, derives the per-layer adjacency
    and transition probabilities, and pickles each intermediate artifact
    into ``self.temp_path`` for later reuse.
    """
    distances = self._compute_structural_distance(
        max_num_layers, workers, verbose,)

    layers_adj, layers_distances = self._get_layer_rep(distances)
    pd.to_pickle(layers_adj, self.temp_path + 'layers_adj.pkl')

    layers_accept, layers_alias = self._get_transition_probs(
        layers_adj, layers_distances)
    pd.to_pickle(layers_alias, self.temp_path + 'layers_alias.pkl')
    pd.to_pickle(layers_accept, self.temp_path + 'layers_accept.pkl')