

Python cloud.bigquery code examples

This article collects typical code examples showing how the google.cloud.bigquery module is used in Python. If you are wondering what cloud.bigquery is for, how to use it, or what it looks like in real code, the hand-picked examples below may help. You can also explore further usage examples of the enclosing google.cloud package.


The sections below present 13 code examples of cloud.bigquery, sorted by popularity by default.
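Before turning to the examples, here is a minimal sketch of the basic google.cloud.bigquery workflow that most of them build on. It assumes application-default credentials are already configured and uses a public dataset purely for illustration:

from google.cloud import bigquery

# Create a client; credentials are resolved from the environment
# (e.g. GOOGLE_APPLICATION_CREDENTIALS or `gcloud auth application-default login`).
client = bigquery.Client()

# Run a query against a public dataset and iterate over the result rows.
query = """
    SELECT name, SUM(number) AS total
    FROM `bigquery-public-data.usa_names.usa_1910_current`
    GROUP BY name
    ORDER BY total DESC
    LIMIT 5
"""
for row in client.query(query).result():
    print(row["name"], row["total"])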

Example 1: get_stackoverflow_tags_count

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def get_stackoverflow_tags_count():
    """Get all the tags contains python and cloud key words"""
    query = """
            SELECT
                SPLIT(tags, '|') tags
            FROM
                `bigquery-public-data.stackoverflow.posts_questions`
            WHERE
                tags LIKE '%python%'
            AND (tags LIKE '%google-cloud-platform%' OR tags LIKE '%gcp%')
        """

    results = bq_utils.execute_query(query)

    rows = [row[0] for row in results]

    return rows 
Author: GoogleCloudPlatform, Project: python-runtime, Lines: 19, Source: posts_stats.py
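Note that this example (and Example 2 below) delegates query execution to a project-local helper, bq_utils.execute_query, which is not reproduced on this page. A minimal sketch of what such a helper might look like (hypothetical; the real implementation in python-runtime may differ):

from google.cloud import bigquery

def execute_query(query):
    """Run a query and return the result rows as plain tuples (hypothetical helper)."""
    client = bigquery.Client()
    # Converting each Row to a tuple lets callers index into rows and
    # concatenate them with other tuples, as the two examples here do.
    return [tuple(row) for row in client.query(query).result()]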

Example 2: get_posts_list_unanswered

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def get_posts_list_unanswered():
    # Get the list of posts that are unanswered
    query = """
            SELECT
                id, title, tags
            FROM
                `bigquery-public-data.stackoverflow.posts_questions`
            WHERE
                tags LIKE '%python%'
            AND (tags LIKE '%google-cloud-platform%' OR tags LIKE '%gcp%')
            AND accepted_answer_id is NULL
            AND answer_count = 0;
        """

    results = bq_utils.execute_query(query)

    # Add current timestamp to the rows
    date_time = datetime.datetime.now()
    rows = [(date_time,) + row for row in results]

    return rows 
Author: GoogleCloudPlatform, Project: python-runtime, Lines: 23, Source: posts_stats.py

Example 3: _execute

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def _execute(self):
    client = self._get_client()
    for job_name in self._params['job_names']:
      # pylint: disable=protected-access
      job = bigquery.job._AsyncJob(job_name, client)
      # pylint: enable=protected-access
      job.reload()
      if job.error_result is not None:
        raise WorkerException(job.error_result['message'])
      if job.state != 'DONE':
        worker_params = {
            'job_names': self._params['job_names'],
            'bq_project_id': self._params['bq_project_id']
        }
        self._enqueue('BQWaiter', worker_params, 60)
        return 
Author: google, Project: crmint, Lines: 18, Source: workers.py

Example 4: query_string

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def query_string(lat, lon, start_date, end_date, satellite, sensor):
    if end_date is None:
        end_date = datetime.date.today()
    if start_date is None:
        start_date = end_date - datetime.timedelta(365)

    date_query = 'sensing_time >= "{}" AND sensing_time <= "{}"'.format(start_date, end_date)
    loc_query = 'north_lat>={} AND south_lat<={} AND west_lon<={} AND east_lon>={}'.format(lat, lat, lon, lon)
    additional_query = ''

    if satellite=='Sentinel-2':
        tab_name = '`bigquery-public-data.cloud_storage_geo_index.sentinel_2_index`'
    elif 'Landsat' in satellite:
        tab_name = '`bigquery-public-data.cloud_storage_geo_index.landsat_index`'
        if sensor is not None:
            sensor_query = ' AND sensor_id="{}"'.format(sensor.replace('OLITIRS', 'OLI_TIRS'))
            additional_query += sensor_query
        if '-' in satellite:
            # Specific query for one Landsat
            sat_query = ' AND spacecraft_id="{}"'.format(satellite.upper().replace('-', '_'))
            additional_query += sat_query
    else:
        raise KeyError('Wrong Satellite name, you entered {}'.format(satellite))

    query = 'SELECT * FROM {} WHERE {} AND {}{}'.format(tab_name, date_query, loc_query, additional_query)

    return query 
Author: cmla, Project: tsd, Lines: 29, Source: search_gcloud.py

Example 5: setUp

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def setUp(self):
        self.key = '/opt/key/key.json'
        self.query = 'SELECT count(*) FROM `bigquery-public-data.usa_names.usa_1910_current` WHERE year=2017 AND number>1000;' 
Author: gabfl, Project: bigquery_fdw, Lines: 5, Source: test_bqclient_test.py

Example 6: test_get_client

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def test_get_client(self):
        bq = bqclient_test.set_bq_instance()
        bqclient_test.set_client(bq, self.key)
        self.assertIsInstance(bqclient_test.get_client(
            bq), bigquery.client.Client) 
Author: gabfl, Project: bigquery_fdw, Lines: 7, Source: test_bqclient_test.py

Example 7: test_get_query_job

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def test_get_query_job(self):
        bq = bqclient_test.set_bq_instance()
        bq.location = 'US'
        bqclient_test.set_client(bq, self.key)
        bqclient_test.run_query(bq, self.query)
        self.assertIsInstance(
            bqclient_test.get_query_job(bq), bigquery.job.QueryJob)

        # To flush results
        bqclient_test.read_results(bq) 
Author: gabfl, Project: bigquery_fdw, Lines: 12, Source: test_bqclient_test.py

Example 8: core

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def core(self):
        return BigQueryCore("bigquery://tm-geospatial") 
Author: thinkingmachines, Project: geomancer, Lines: 4, Source: test_bq.py

Example 9: name

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def name(self):
        return "bigquery" 
Author: thinkingmachines, Project: geomancer, Lines: 4, Source: test_bq.py

Example 10: _bigquery

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def _bigquery(table_name, field_name):
  def _get_bq_client():
    try:
      return _SESSION['bq_client']
    except KeyError:
      key = os.path.join(os.path.dirname(__file__), '..', 'data',
                         'service-account.json')
      _SESSION['bq_client'] = bigquery.Client.from_service_account_json(key)
      return _SESSION['bq_client']

  def _fetch_bq_table_data(table_name):
    client = _get_bq_client()
    table_name_pieces = table_name.split('.')
    if len(table_name_pieces) == 2:
      dataset_id, table_id = table_name_pieces
    elif len(table_name_pieces) == 3:
      project_id, dataset_id, table_id = table_name_pieces
      client.project = project_id
    else:
      raise ValueError('Malformed BigQuery table name: `%s`' % table_name)
    dataset = client.dataset(dataset_id)
    table = dataset.table(table_id)
    try:
      table.reload()
    except NotFound:
      raise ValueError('BigQuery table `%s` not found' % table_name)
    field_names = [f.name for f in table.schema]
    field_values = list(table.fetch_data(max_results=1))[0]
    _SESSION['bq_cache'][table_name] = dict(zip(field_names, field_values))

  if table_name not in _SESSION['bq_cache']:
    _fetch_bq_table_data(table_name)
  try:
    value = _SESSION['bq_cache'][table_name][field_name]
  except KeyError:
    raise ValueError(
        "No field '%s' in BigQuery table `%s`" % (field_name, table_name))
  if isinstance(value, list):
    return '\n'.join([str(e) for e in value])
  else:
    return value 
Author: google, Project: crmint, Lines: 43, Source: inline.py

Example 11: _get_client

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def _get_client(self):
    bigquery.Client.SCOPE = (
        'https://www.googleapis.com/auth/bigquery',
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/drive')
    client = bigquery.Client.from_service_account_json(_KEY_FILE)
    if self._params['bq_project_id'].strip():
      client.project = self._params['bq_project_id']
    return client 
Author: google, Project: crmint, Lines: 11, Source: workers.py

Example 12: setUp

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def setUp(self):
    super(ExecutorTest, self).setUp()
    self._source_data_dir = os.path.join(
        os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.dirname(__file__)))),
        'components', 'testdata')
    self._output_data_dir = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self._testMethodName)
    tf.io.gfile.makedirs(self._output_data_dir)
    self._model_export = standard_artifacts.Model()
    self._model_export.uri = os.path.join(self._source_data_dir,
                                          'trainer/current')
    self._model_blessing = standard_artifacts.ModelBlessing()
    self._input_dict = {
        'model': [self._model_export],
        'model_blessing': [self._model_blessing],
    }

    self._model_push = standard_artifacts.PushedModel()
    self._model_push.uri = 'gs://bucket/test_model_path'
    self._output_dict = {
        'pushed_model': [self._model_push],
    }
    self._exec_properties = {
        'custom_config': {
            'bigquery_serving_args': {
                'model_name': 'model_name',
                'project_id': 'project_id',
                'bq_dataset_id': 'bq_dataset_id',
            },
        },
        'push_destination': None,
    }
    self._executor = Executor()

    # Setting up Mock for external services
    self.addCleanup(mock.patch.stopall)
    self.mock_bq = mock.patch.object(bigquery, 'Client', autospec=True).start()
    self.mock_check_blessing = mock.patch.object(
        Executor, 'CheckBlessing', autospec=True).start()
    self.mock_copy_dir = mock.patch.object(
        io_utils, 'copy_dir', autospec=True).start() 
Author: tensorflow, Project: tfx, Lines: 45, Source: executor_test.py

Example 13: search

# Required import: from google import cloud [as alias]
# Or: from google.cloud import bigquery [as alias]
def search(aoi, start_date=None, end_date=None, satellite='Sentinel-2', sensor=None):
    """
    List images covering an area of interest (AOI) using Google Index.

    Args:
        aoi: geojson.Polygon or geojson.Point object
    """
    # compute the centroid of the area of interest
    lon, lat = shapely.geometry.shape(aoi).centroid.coords.xy
    lon, lat = lon[0], lat[0]

    # build query
    query = query_string(lat, lon, start_date, end_date, satellite, sensor)

    # query Gcloud BigQuery Index
    try:
        private_key = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
    except KeyError as e:
        print('You must set the GOOGLE_APPLICATION_CREDENTIALS environment variable to point to the credentials JSON file')
        raise e

    # df = gbq.read_gbq(query, private_key=private_key)
    client = bigquery.Client.from_service_account_json(private_key)
    rows = list(client.query(query).result())
    df = pd.DataFrame(dict(row.items()) for row in rows)

    # check if the image footprint contains the area of interest
    if satellite == 'Sentinel-2':
        res = []
        for i, row in df.iterrows():
            footprint, epsg = get_footprint(row)
            utm_aoi = convert_aoi_to_utm(aoi, epsg)
            if footprint.contains(utm_aoi):
                res.append(row.to_dict())
    else:  # we need to remove duplicates
        order_collection_category = {'T1':0, 'T2':1, 'T3':2, 'RT':3, 'N/A':4}
        order_collection_number = {'01':0, 'PRE':1}
        df['order_collection_category'] = df['collection_category'].apply(lambda x: order_collection_category[x])
        df['order_collection_number'] = df['collection_number'].apply(lambda x: order_collection_number[x])
        unique_scene = ['wrs_path', 'wrs_row', 'spacecraft_id', 'sensor_id', 'date_acquired']
        orders = ['order_collection_number', 'order_collection_category']
        df.sort_values(by=unique_scene+orders, inplace=True)
        res = df.groupby(unique_scene).first().reset_index().drop(orders, axis=1).sort_values(by=['date_acquired']).to_dict('records')
    return res 
Author: cmla, Project: tsd, Lines: 46, Source: search_gcloud.py
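A hypothetical call to the search function above could look as follows; it assumes the geojson package is installed and that GOOGLE_APPLICATION_CREDENTIALS points to a valid service-account key file:

import geojson

# Area of interest given as a single point (longitude, latitude); the coordinates are illustrative.
aoi = geojson.Point((2.35, 48.85))

# List Sentinel-2 scenes from the default date range (the last 365 days) that cover the point.
scenes = search(aoi, satellite='Sentinel-2')
print('{} scenes found'.format(len(scenes)))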


Note: The google.cloud.bigquery examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their authors; copyright of the source code belongs to the original authors, and any redistribution or use should follow the corresponding project's license. Do not repost without permission.