This article collects typical usage examples of the pandas_gbq.to_gbq method in Python. If you are wondering how pandas_gbq.to_gbq is called in practice, or want to see real-world examples of it, the curated snippets below should help. You can also explore further usage examples from the pandas_gbq module.
The following presents 6 code examples of the pandas_gbq.to_gbq method, sorted by popularity.
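As a quick orientation before the examples: pandas-gbq is installed separately (e.g. pip install pandas-gbq), and pandas itself exposes DataFrame.to_gbq(), which delegates to pandas_gbq (the wrappers in Examples 2-4 show exactly that delegation). A minimal sketch; "my-project" and "my_dataset.my_table" are placeholders:

# Minimal sketch; project and table IDs below are placeholders.
import pandas as pd
import pandas_gbq

df = pd.DataFrame({"my_string": ["a", "b", "c"]})
pandas_gbq.to_gbq(df, "my_dataset.my_table", project_id="my-project")

# Equivalent convenience call: DataFrame.to_gbq delegates to pandas_gbq.
# df.to_gbq("my_dataset.my_table", project_id="my-project")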
Example 1: main
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def main(project_id, table_id):
    # [START bigquery_pandas_gbq_to_gbq_simple]
    import pandas
    import pandas_gbq

    # TODO: Set project_id to your Google Cloud Platform project ID.
    # project_id = "my-project"

    # TODO: Set table_id to the full destination table ID (including the
    #       dataset ID).
    # table_id = 'my_dataset.my_table'

    df = pandas.DataFrame(
        {
            "my_string": ["a", "b", "c"],
            "my_int64": [1, 2, 3],
            "my_float64": [4.0, 5.0, 6.0],
            "my_bool1": [True, False, True],
            "my_bool2": [False, True, False],
            "my_dates": pandas.date_range("now", periods=3),
        }
    )

    pandas_gbq.to_gbq(df, table_id, project_id=project_id)
    # [END bigquery_pandas_gbq_to_gbq_simple]
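By default the call above fails if the destination table already exists. A small variation, reusing the same df, table_id, and project_id as Example 1; if_exists and table_schema are standard pandas_gbq.to_gbq parameters (they also appear in the wrapper signatures in Examples 2 and 3):

    # Variation on Example 1 (same placeholder IDs): append instead of failing
    # when the table exists, and pin the date column to TIMESTAMP explicitly.
    pandas_gbq.to_gbq(
        df,
        table_id,
        project_id=project_id,
        if_exists="append",  # 'fail' (default), 'replace', or 'append'
        table_schema=[{"name": "my_dates", "type": "TIMESTAMP"}],
    )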
Example 2: to_gbq
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def to_gbq(dataframe, destination_table, project_id=None, chunksize=None,
           reauth=False, if_exists='fail', auth_local_webserver=False,
           table_schema=None, location=None, progress_bar=True,
           credentials=None, verbose=None, private_key=None):
    pandas_gbq = _try_import()
    return pandas_gbq.to_gbq(
        dataframe, destination_table, project_id=project_id,
        chunksize=chunksize, reauth=reauth, if_exists=if_exists,
        auth_local_webserver=auth_local_webserver, table_schema=table_schema,
        location=location, progress_bar=progress_bar,
        credentials=credentials, verbose=verbose, private_key=private_key)
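Examples 2-4 are thin wrappers around pandas_gbq.to_gbq and rely on a module-level helper _try_import() that is not part of the excerpts. A minimal sketch of such a helper, assuming all it needs to do is import pandas_gbq lazily and fail with an actionable message when the optional dependency is missing:

def _try_import():
    # Hypothetical helper assumed by Examples 2-4: import pandas_gbq lazily
    # and raise a clear error if the optional dependency is not installed.
    try:
        import pandas_gbq
    except ImportError as exc:
        raise ImportError(
            "pandas_gbq is required to load data to Google BigQuery. "
            "Install it with `pip install pandas-gbq`."
        ) from exc
    return pandas_gbq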
Example 3: to_gbq
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def to_gbq(dataframe, destination_table, project_id, chunksize=None,
           verbose=None, reauth=False, if_exists='fail', private_key=None,
           auth_local_webserver=False, table_schema=None):
    pandas_gbq = _try_import()
    return pandas_gbq.to_gbq(
        dataframe, destination_table, project_id, chunksize=chunksize,
        verbose=verbose, reauth=reauth, if_exists=if_exists,
        private_key=private_key, auth_local_webserver=auth_local_webserver,
        table_schema=table_schema)
Example 4: to_gbq
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
           verbose=True, reauth=False, if_exists='fail', private_key=None):
    pandas_gbq = _try_import()
    pandas_gbq.to_gbq(dataframe, destination_table, project_id,
                      chunksize=chunksize,
                      verbose=verbose, reauth=reauth,
                      if_exists=if_exists, private_key=private_key)
Example 5: df_to_sql
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
    """
    Upload data from a Pandas DataFrame to BigQuery. Calls
    `DataFrame.to_gbq()` which requires `pandas_gbq` to be installed.

    :param df: Dataframe with data to be uploaded
    :param kwargs: kwargs to be passed to to_gbq() method. Requires that `schema`,
        `name` and `con` are present in kwargs. `name` and `schema` are combined
        and passed to `to_gbq()` as `destination_table`.
    """
    try:
        import pandas_gbq
        from google.oauth2 import service_account
    except ImportError:
        raise Exception(
            "Could not import libraries `pandas_gbq` or `google.oauth2`, which are "
            "required to be installed in your environment in order "
            "to upload data to BigQuery"
        )

    if not ("name" in kwargs and "schema" in kwargs and "con" in kwargs):
        raise Exception("name, schema and con need to be defined in kwargs")

    gbq_kwargs = {}
    gbq_kwargs["project_id"] = kwargs["con"].engine.url.host
    gbq_kwargs["destination_table"] = f"{kwargs.pop('schema')}.{kwargs.pop('name')}"

    # add credentials if they are set on the SQLAlchemy Dialect:
    creds = kwargs["con"].dialect.credentials_info
    if creds:
        credentials = service_account.Credentials.from_service_account_info(creds)
        gbq_kwargs["credentials"] = credentials

    # Only pass through supported kwargs
    supported_kwarg_keys = {"if_exists"}
    for key in supported_kwarg_keys:
        if key in kwargs:
            gbq_kwargs[key] = kwargs[key]

    pandas_gbq.to_gbq(df, **gbq_kwargs)
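For context, a hypothetical call to this method (here assumed to be Superset's BigQueryEngineSpec), assuming the BigQuery SQLAlchemy dialect (sqlalchemy-bigquery/pybigquery) is installed so that the connection exposes engine.url.host and dialect.credentials_info the way df_to_sql expects; project, dataset, and table names are placeholders:

# Hypothetical usage sketch; requires the BigQuery SQLAlchemy dialect and
# valid GCP credentials. All names below are placeholders.
import pandas as pd
from sqlalchemy import create_engine

from superset.db_engine_specs.bigquery import BigQueryEngineSpec

engine = create_engine("bigquery://my-project")
df = pd.DataFrame({"col": [1, 2, 3]})
with engine.connect() as con:
    BigQueryEngineSpec.df_to_sql(
        df, con=con, schema="my_dataset", name="my_table", if_exists="replace"
    )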
Example 6: test_df_to_sql
# Required import: import pandas_gbq [as alias]
# Or: from pandas_gbq import to_gbq [as alias]
def test_df_to_sql(self):
    """
    DB Eng Specs (bigquery): Test DataFrame to SQL contract
    """
    # test missing google.oauth2 dependency
    sys.modules["pandas_gbq"] = mock.MagicMock()
    df = DataFrame()
    self.assertRaisesRegexp(
        Exception,
        "Could not import libraries",
        BigQueryEngineSpec.df_to_sql,
        df,
        con="some_connection",
        schema="schema",
        name="name",
    )

    invalid_kwargs = [
        {"name": "some_name"},
        {"schema": "some_schema"},
        {"con": "some_con"},
        {"name": "some_name", "con": "some_con"},
        {"name": "some_name", "schema": "some_schema"},
        {"con": "some_con", "schema": "some_schema"},
    ]
    # Test check for missing required kwargs (name, schema, con)
    sys.modules["google.oauth2"] = mock.MagicMock()
    for invalid_kwarg in invalid_kwargs:
        self.assertRaisesRegexp(
            Exception,
            "name, schema and con need to be defined in kwargs",
            BigQueryEngineSpec.df_to_sql,
            df,
            **invalid_kwarg,
        )

    import pandas_gbq
    from google.oauth2 import service_account

    pandas_gbq.to_gbq = mock.Mock()
    service_account.Credentials.from_service_account_info = mock.MagicMock(
        return_value="account_info"
    )
    connection = mock.Mock()
    connection.engine.url.host = "google-host"
    connection.dialect.credentials_info = "secrets"
    BigQueryEngineSpec.df_to_sql(
        df, con=connection, schema="schema", name="name", if_exists="extra_key"
    )
    pandas_gbq.to_gbq.assert_called_with(
        df,
        project_id="google-host",
        destination_table="schema.name",
        credentials="account_info",
        if_exists="extra_key",
    )