本文整理汇总了Python中google.api_core.exceptions.BadRequest方法的典型用法代码示例。如果您正苦于以下问题:Python exceptions.BadRequest方法的具体用法?Python exceptions.BadRequest怎么用?Python exceptions.BadRequest使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类google.api_core.exceptions
的用法示例。
在下文中一共展示了exceptions.BadRequest方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: tearDown
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def tearDown(self):
    """Delete every resource queued in ``self.to_delete``, retrying transient errors."""

    def _resource_in_use(bad_request):
        # A dataset/table delete can fail while a job still references it.
        return any(
            error["reason"] == "resourceInUse" for error in bad_request._errors
        )

    retry_while_in_use = RetryErrors(BadRequest, error_predicate=_resource_in_use)
    retry_bucket_errors = RetryErrors(
        (Conflict, TooManyRequests, InternalServerError, ServiceUnavailable)
    )

    for resource in self.to_delete:
        if isinstance(resource, storage.Bucket):
            retry_bucket_errors(resource.delete)(force=True)
        elif isinstance(resource, (Dataset, bigquery.DatasetReference)):
            retry_while_in_use(Config.CLIENT.delete_dataset)(
                resource, delete_contents=True
            )
        elif isinstance(resource, (Table, bigquery.TableReference)):
            retry_while_in_use(Config.CLIENT.delete_table)(resource)
        else:
            resource.delete()
示例2: test_get_failed_job
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_get_failed_job(self):
    """A server-side failed job is still retrievable, but its query
    parameters are not (regression test for issue 4246)."""
    from google.api_core.exceptions import BadRequest

    job_id = f"invalid_{uuid.uuid4()}"
    query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);"
    ts_param = bigquery.ScalarQueryParameter("ts_value", "TIMESTAMP", 1.4810976e9)
    job_config = bigquery.QueryJobConfig()
    job_config.query_parameters = [ts_param]

    with self.assertRaises(BadRequest):
        Config.CLIENT.query(query, job_id=job_id, job_config=job_config).result()

    failed_job = Config.CLIENT.get_job(job_id)
    with self.assertRaises(ValueError):
        failed_job.query_parameters
示例3: test_bigquery_magic_omits_tracebacks_from_error_message
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_bigquery_magic_omits_tracebacks_from_error_message():
    """The %%bigquery magic must surface API errors on stderr without a traceback."""
    shell = IPython.get_ipython()
    shell.extension_manager.load_extension("google.cloud.bigquery")

    fake_credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(fake_credentials, "general-project")
    )
    run_query_patch = mock.patch(
        "google.cloud.bigquery.magics._run_query",
        autospec=True,
        side_effect=exceptions.BadRequest("Syntax error in SQL query"),
    )

    with run_query_patch, default_patch, io.capture_output() as captured_io:
        shell.run_cell_magic("bigquery", "", "SELECT foo FROM WHERE LIMIT bar")

    stderr_text = captured_io.stderr
    assert "400 Syntax error in SQL query" in stderr_text
    assert "Traceback (most recent call last)" not in stderr_text
    assert "Syntax error" not in captured_io.stdout
示例4: check_definitions
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def check_definitions(self, measure_defs, start_date, end_date, verbose):
    """Check the SQL definition of every measure, collecting all errors.

    JSON validity is not checked here: it is already validated as a
    side-effect of parsing the command options.

    Raises:
        BadRequest: with one bullet line per failing measure.
    """
    problems = []
    for measure_def in measure_defs:
        measure_id = measure_def["id"]
        try:
            measure = create_or_update_measure(measure_def, end_date)
            MeasureCalculation(
                measure, start_date=start_date, end_date=end_date, verbose=verbose
            ).check_definition()
        except BadRequest as e:
            problems.append("* SQL error in `{}`: {}".format(measure_id, e.args[0]))
        except TypeError as e:
            problems.append("* JSON error in `{}`: {}".format(measure_id, e.args[0]))
    if problems:
        raise BadRequest("\n".join(problems))
示例5: test_dry_run
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_dry_run(engine, api_client):
    """A qualified table dry-runs successfully; an unqualified one is rejected."""
    qualified_sql = "SELECT * FROM test_pybigquery.sample_one_row"
    assert api_client.dry_run_query(qualified_sql).total_bytes_processed == 148

    unqualified_sql = "SELECT * FROM sample_one_row"
    with pytest.raises(BadRequest) as exc_info:
        api_client.dry_run_query(unqualified_sql)
    expected = (
        'Table name "sample_one_row" missing dataset while no default '
        "dataset is set in the request."
    )
    assert expected in str(exc_info.value.message)
示例6: test_new_bucket_w_ubla
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_new_bucket_w_ubla(self):
    """ACL reads/writes fail with 400 on a uniform-bucket-level-access bucket."""
    bucket_name = "new-w-ubla" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, bucket_name
    )

    bucket = Config.CLIENT.bucket(bucket_name)
    bucket.iam_configuration.uniform_bucket_level_access_enabled = True
    retry_429_503(bucket.create)()
    self.case_buckets_to_delete.append(bucket_name)

    # Bucket-level ACL operations are rejected while UBLA is enabled.
    acl = bucket.acl
    with self.assertRaises(exceptions.BadRequest):
        acl.reload()
    acl.loaded = True  # Fake that we somehow loaded the ACL
    acl.all().grant_read()
    with self.assertRaises(exceptions.BadRequest):
        acl.save()

    # Object reads and writes still work without ACLs.
    blob_name = "my-blob.txt"
    payload = b"DEADBEEF"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(payload)
    fetched = bucket.get_blob(blob_name)
    self.assertEqual(fetched.download_as_string(), payload)

    # Object-level ACL operations are rejected as well.
    object_acl = blob.acl
    with self.assertRaises(exceptions.BadRequest):
        object_acl.reload()
    object_acl.loaded = True  # Fake that we somehow loaded the ACL
    object_acl.all().grant_read()
    with self.assertRaises(exceptions.BadRequest):
        object_acl.save()
示例7: test_ubla_set_unset_preserves_acls
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_ubla_set_unset_preserves_acls(self):
    """Setting then clearing UBLA must leave both bucket and blob ACLs unchanged."""
    new_bucket_name = "ubla-acls" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    blob_name = "my-blob.txt"
    blob = bucket.blob(blob_name)
    payload = b"DEADBEEF"
    blob.upload_from_string(payload)

    # Preserve ACLs before setting UBLA.
    # BUG FIX: the blob snapshot previously read ``bucket.acl``, so the
    # blob-ACL assertion below compared the bucket ACL with itself and
    # could never detect a lost blob ACL.
    bucket_acl_before = list(bucket.acl)
    blob_acl_before = list(blob.acl)

    # Set UBLA.
    bucket.iam_configuration.uniform_bucket_level_access_enabled = True
    bucket.patch()
    self.assertTrue(bucket.iam_configuration.uniform_bucket_level_access_enabled)

    # While UBLA is set, ACLs cannot be read or written.
    with self.assertRaises(exceptions.BadRequest):
        bucket.acl.reload()

    # Clear UBLA.
    bucket.iam_configuration.uniform_bucket_level_access_enabled = False
    bucket.patch()

    # Query ACLs after clearing UBLA.
    bucket.acl.reload()
    bucket_acl_after = list(bucket.acl)
    blob.acl.reload()
    blob_acl_after = list(blob.acl)  # BUG FIX: was ``list(bucket.acl)``

    self.assertEqual(bucket_acl_before, bucket_acl_after)
    self.assertEqual(blob_acl_before, blob_acl_after)
示例8: test_get_current_calibration_error
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_get_current_calibration_error(client_constructor):
    """A BadRequest from the API surfaces as EngineException with its message."""
    grpc_client = setup_mock_(client_constructor)
    grpc_client.get_quantum_calibration.side_effect = exceptions.BadRequest('boom')
    with pytest.raises(EngineException, match='boom'):
        EngineClient().get_current_calibration('proj', 'processor0')
示例9: test_get_reservation_exception
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_get_reservation_exception(client_constructor):
    """A BadRequest while fetching a reservation surfaces as EngineException."""
    grpc_client = setup_mock_(client_constructor)
    grpc_client.get_quantum_reservation.side_effect = exceptions.BadRequest('boom')
    with pytest.raises(EngineException, match='boom'):
        EngineClient().get_reservation('proj', 'processor0', 'goog')
示例10: test_delete_dataset_delete_contents_false
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_delete_dataset_delete_contents_false(self):
    """Deleting a non-empty dataset without delete_contents must raise 400."""
    from google.api_core import exceptions

    dataset = self.temp_dataset(_make_dataset_id("delete_table_false"))
    table = Table(dataset.table("test_table"), schema=SCHEMA)
    retry_403(Config.CLIENT.create_table)(table)

    with self.assertRaises(exceptions.BadRequest):
        Config.CLIENT.delete_dataset(dataset)
示例11: test_query_w_failed_query
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_query_w_failed_query(self):
    """Invalid SQL must surface as BadRequest when the query result is fetched."""
    from google.api_core.exceptions import BadRequest

    with self.assertRaises(BadRequest):
        Config.CLIENT.query("invalid syntax;").result()
示例12: test_bigquery_magic_w_table_id_invalid
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def test_bigquery_magic_w_table_id_invalid():
    """An invalid table ID should produce a clean 400 message with no traceback."""
    shell = IPython.get_ipython()
    shell.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    fake_credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(fake_credentials, "general-project")
    )
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client.list_rows",
        autospec=True,
        side_effect=exceptions.BadRequest("Not a valid table ID"),
    )

    with list_rows_patch, default_patch, io.capture_output() as captured_io:
        shell.run_cell_magic("bigquery", "df", "not-a-real-table")

    stderr_text = captured_io.stderr
    assert "Could not save output to variable" in stderr_text
    assert "400 Not a valid table ID" in stderr_text
    assert "Traceback (most recent call last)" not in stderr_text
示例13: process
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def process(self, element, schemas):
    """Start a BigQuery load job for one group of GCS objects.

    Args:
        element: tuple of (sharded key name, iterable of filesystem paths).
        schemas: mapping of sharded key name to the schema for that group.

    Raises:
        BadRequest: if the load job cannot be started; the full job context
            is logged before re-raising.
    """
    dataset_ref = self.get_dataset_ref()
    sharded_key_name = element[0]
    key_name = AssignGroupByKey.remove_shard(element[0])
    # Materialize once: the grouped iterable may only be consumable a single time.
    object_paths = list(element[1])

    job_config = bigquery.LoadJobConfig()
    job_config.write_disposition = 'WRITE_APPEND'
    job_config.schema_update_options = [
        bigquery.job.SchemaUpdateOption.ALLOW_FIELD_ADDITION]
    table_ref = dataset_ref.table(self.asset_type_to_table_name(key_name))
    # use load_time as a timestamp.
    job_config.time_partitioning = bigquery.table.TimePartitioning(
        field='timestamp')
    job_config.schema = self.to_bigquery_schema(schemas[sharded_key_name])
    job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
    try:
        load_job = self.bigquery_client.load_table_from_uri(
            object_paths,
            table_ref,
            location=self.dataset_location,
            job_config=job_config)
        self.load_jobs[key_name] = load_job
    except BadRequest:
        logging.error('error in load_job %s, %s, %s, %s',
                      str(object_paths), str(table_ref),
                      str(self.dataset_location),
                      str(job_config.to_api_repr()))
        # Bare raise preserves the original exception and traceback
        # (``raise e`` is the non-idiomatic equivalent).
        raise
示例14: finish_bundle
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def finish_bundle(self):
    """Wait for all pending load jobs to complete, re-raising any BadRequest.

    Raises:
        BadRequest: if any load job failed; the job's self_link is logged first.
    """
    # NOTE(review): presumably cleared so the client is not carried past the
    # bundle (e.g. serialized with the DoFn) — confirm against the runner.
    self.bigquery_client = None
    # Only the job objects are needed, so iterate values (not items).
    for load_job in self.load_jobs.values():
        try:
            load_job.result()  # blocks until the job finishes or fails
        except BadRequest:
            logging.error('error in load_job %s', load_job.self_link)
            raise  # bare raise keeps the original traceback intact
示例15: get_buckets
# Requires: from google.api_core import exceptions
# or:       from google.api_core.exceptions import BadRequest
def get_buckets(project_ids: List[str],
                gcs_client: storage.Client) -> List[Dict[str, str]]:
    """Retrieves list of metadata for all buckets in a GCP org.

    Args:
        project_ids: List of strings holding project IDs
        gcs_client: storage.Client object

    Returns:
        List of dictionaries mapping bucket-level metadata. Projects whose
        buckets cannot be listed are skipped; on an unexpected error the
        rows collected so far are returned (previously ``None`` leaked out,
        contradicting the declared return type).
    """
    output_list: List[Dict[str, str]] = []
    # Hoisted out of the loop: the export day is invariant for the whole run.
    export_day = datetime.datetime.utcnow().strftime("%Y-%m-%d")
    try:
        for project_id in project_ids:
            try:
                for bucket in gcs_client.list_buckets(project=project_id):
                    output_list.append({
                        "bucket_name": bucket.name,
                        "project_id": project_id,
                        "last_read_timestamp": "",
                        "days_since_last_read": -1,
                        "read_count_30_days": -1,
                        "read_count_90_days": -1,
                        "export_day": export_day,
                        "recommended_OLM": ""
                    })
            # BUG FIX: these handlers previously formatted ``bucket.name``,
            # which is unbound when list_buckets() itself raises — the
            # handler then died with NameError and masked the real error.
            except Forbidden as err:
                logging.error("Access denied listing buckets in project %s. %s",
                              project_id, err)
            except BadRequest as err:
                logging.error("Bad request listing buckets in project %s. %s",
                              project_id, err)
    except Exception as err:
        logging.error("Could not access buckets: %s", err)
    return output_list