本文整理汇总了Python中utils.net.url_read_json函数的典型用法代码示例。如果您正苦于以下问题:Python url_read_json函数的具体用法?Python url_read_json怎么用?Python url_read_json使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了url_read_json函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: retrieve_results
def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector):
  """Retrieves results for a single task ID.

  Polls the server until the task reaches a non-running state, the timeout
  expires, or should_stop is signaled.

  Arguments:
    base_url: Base URL of the Swarming server.
    shard_index: Index of this shard, forwarded to output_collector.
    task_id: Swarming task ID to poll.
    timeout: Total polling budget in seconds as a float; falsy (0.0) means
        poll forever.
    should_stop: threading.Event-like object; polling aborts when set.
    output_collector: Optional object whose process_shard_result() is called
        with the final result.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout else None
  attempt = 0
  while not should_stop.is_set():
    attempt += 1
    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None
    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      # Never sleep past the deadline.
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        # wait() doubles as an interruptible sleep: it returns early if
        # should_stop gets set while we are waiting.
        should_stop.wait(delay)
        if should_stop.is_set():
          return None
    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    result = net.url_read_json(result_url, retry_50x=False)
    if not result:
      continue
    if result['state'] in State.STATES_NOT_RUNNING:
      # TODO(maruel): Not always fetch stdout?
      out = net.url_read_json(output_url)
      # If the stdout fetch failed, propagate the falsy value as-is.
      result['output'] = out.get('output') if out else out
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
示例2: poll_server
def poll_server(botobj, quit_bit):
  """Polls the server to run one loop.

  Issues a single /bot/poll request and dispatches on the command the server
  returns: 'sleep', 'terminate', 'run', 'update' or 'restart'.

  Returns True if executed some action, False if server asked the bot to sleep.
  """
  # Access to a protected member _XXX of a client class - pylint: disable=W0212
  start = time.time()
  resp = net.url_read_json(
      botobj.server + '/swarming/api/v1/bot/poll', data=botobj._attributes)
  if not resp:
    # Could not reach the server; treat it like a sleep request.
    return False
  logging.debug('Server response:\n%s', resp)
  cmd = resp['cmd']
  if cmd == 'sleep':
    quit_bit.wait(resp['duration'])
    return False
  if cmd == 'terminate':
    quit_bit.set()
    # This is similar to post_update() in task_runner.py.
    # Acknowledge the termination task immediately with a zero-cost,
    # zero-duration completed update.
    params = {
      'cost_usd': 0,
      'duration': 0,
      'exit_code': 0,
      'hard_timeout': False,
      'id': botobj.id,
      'io_timeout': False,
      'output': '',
      'output_chunk_start': 0,
      'task_id': resp['task_id'],
    }
    net.url_read_json(
        botobj.server + '/swarming/api/v1/bot/task_update/%s' % resp['task_id'],
        data=params)
    return False
  if cmd == 'run':
    if run_manifest(botobj, resp['manifest'], start):
      # Completed a task successfully so update swarming_bot.zip if necessary.
      update_lkgbc(botobj)
    # TODO(maruel): Handle the case where quit_bit.is_set() happens here. This
    # is concerning as this means a signal (often SIGTERM) was received while
    # running the task. Make sure the host is properly restarting.
  elif cmd == 'update':
    update_bot(botobj, resp['version'])
  elif cmd == 'restart':
    if _in_load_test_mode():
      logging.warning('Would have restarted: %s' % resp['message'])
    else:
      botobj.restart(resp['message'])
  else:
    raise ValueError('Unexpected command: %s\n%s' % (cmd, resp))
  return True
示例3: CMDreproduce
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  Returns the command's exit code, or 1 if the request could not be fetched or
  the command could not be started.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error("Must specify exactly one task id.")
  url = options.swarming + "/_ah/api/swarming/v1/task/%s/request" % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, "Failed to retrieve request data for the task"
    return 1
  if not os.path.isdir("work"):
    os.mkdir("work")
  properties = request["properties"]
  env = None
  if properties["env"]:
    # Start from the current environment and overlay the task's variables.
    # Values are encoded to utf-8 bytes since os.environ on Python 2 holds
    # byte strings.
    env = os.environ.copy()
    logging.info("env: %r", properties["env"])
    env.update((i["key"].encode("utf-8"), i["value"].encode("utf-8")) for i in properties["env"])
  try:
    return subprocess.call(properties["command"], env=env, cwd="work")
  except OSError as e:
    print >> sys.stderr, "Failed to run: %s" % " ".join(properties["command"])
    print >> sys.stderr, str(e)
    return 1
示例4: post_update
def post_update(swarming_server, params, exit_code, stdout, output_chunk_start):
  """Posts task update to task_update.

  Arguments:
    swarming_server: Base URL to Swarming server.
    params: Default JSON parameters for the POST.
    exit_code: Process exit code, only when a command completed.
    stdout: Incremental output since last call, if any.
    output_chunk_start: Total number of stdout previously sent, for coherency
        with the server.

  Raises:
    ValueError: when the server cannot be reached or replies with an error.
  """
  payload = params.copy()
  if exit_code is not None:
    payload['exit_code'] = exit_code
  if stdout:
    # The server relies on output_chunk_start to process and save the stdout
    # chunks in the DB in the right order.
    payload['output'] = base64.b64encode(stdout)
    payload['output_chunk_start'] = output_chunk_start
  # TODO(maruel): Support early cancellation.
  # https://code.google.com/p/swarming/issues/detail?id=62
  resp = net.url_read_json(
      swarming_server+'/swarming/api/v1/bot/task_update/%s' % payload['task_id'],
      data=payload)
  logging.debug('post_update() = %s', resp)
  if resp and not resp.get('error'):
    return
  # Abandon it. This will force a process exit.
  raise ValueError(resp.get('error') if resp else 'Failed to contact server')
示例5: CMDbots
def CMDbots(parser, args):
  """Returns information about the bots connected to the Swarming server.

  Fetches the full bot list via cursor pagination, then prints each bot that
  passes the --dead-only/--keep-dead and dimension filters. Returns 0 on
  success, 1 if the server could not be reached.
  """
  add_filter_options(parser)
  parser.filter_group.add_option(
      "--dead-only", action="store_true", help="Only print dead bots, useful to reap them and reimage broken bots"
  )
  parser.filter_group.add_option("-k", "--keep-dead", action="store_true", help="Do not filter out dead bots")
  parser.filter_group.add_option("-b", "--bare", action="store_true", help="Do not print out dimensions")
  options, args = parser.parse_args(args)
  if options.keep_dead and options.dead_only:
    parser.error("Use only one of --keep-dead and --dead-only")
  bots = []
  cursor = None
  limit = 250
  # Iterate via cursors.
  base_url = options.swarming + "/_ah/api/swarming/v1/bots/list?limit=%d" % limit
  while True:
    url = base_url
    if cursor:
      url += "&cursor=%s" % urllib.quote(cursor)
    data = net.url_read_json(url)
    if data is None:
      print >> sys.stderr, "Failed to access %s" % options.swarming
      return 1
    bots.extend(data["items"])
    cursor = data.get("cursor")
    if not cursor:
      break
  for bot in natsort.natsorted(bots, key=lambda x: x["bot_id"]):
    if options.dead_only:
      if not bot.get("is_dead"):
        continue
    elif not options.keep_dead and bot.get("is_dead"):
      continue
    # If the user requested to filter on dimensions, ensure the bot has all the
    # dimensions requested.
    dimensions = {i["key"]: i["value"] for i in bot["dimensions"]}
    for key, value in options.dimensions:
      if key not in dimensions:
        break
      # A bot can have multiple value for a key, for example,
      # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
      # be accepted.
      if isinstance(dimensions[key], list):
        if value not in dimensions[key]:
          break
      else:
        if value != dimensions[key]:
          break
    else:
      # for/else: only reached when no dimension filter broke out, i.e. the
      # bot matched every requested dimension.
      print bot["bot_id"]
      if not options.bare:
        print " %s" % json.dumps(dimensions, sort_keys=True)
        if bot.get("task_id"):
          print " task: %s" % bot["task_id"]
  return 0
示例6: CMDbot_delete
def CMDbot_delete(parser, args):
  """Forcibly deletes bots from the Swarming server.

  Prompts for confirmation unless --force is given. Returns 0 on success, 1 if
  the user aborted or any deletion failed.
  """
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Do not prompt for confirmation')
  options, args = parser.parse_args(args)
  if not args:
    # Fixed typo in the error message ('specific' -> 'specify').
    parser.error('Please specify bots to delete')
  bots = sorted(args)
  if not options.force:
    print('Delete the following bots?')
    for bot in bots:
      print(' %s' % bot)
    if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
      print('Goodbye.')
      return 1
  result = 0
  for bot in bots:
    url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
    # POST with an empty body; a None reply means the request failed, most
    # likely because the bot is already gone.
    if net.url_read_json(url, data={}, method='POST') is None:
      print('Deleting %s failed. Probably already gone' % bot)
      result = 1
  return result
示例7: swarming_trigger
def swarming_trigger(swarming, raw_request, xsrf_token):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Arguments:
    swarming: Base URL of the Swarming server.
    raw_request: JSON-serializable request body; must contain a 'name' key.
    xsrf_token: XSRF token used to authenticate the POST.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
    or None on failure (after reporting the error).
  """
  logging.info('Triggering: %s', raw_request['name'])
  response = net.url_read_json(
      swarming + '/swarming/api/v1/client/request',
      data=raw_request,
      headers={'X-XSRF-Token': xsrf_token})
  if response:
    return response
  on_error.report('Failed to trigger task %s' % raw_request['name'])
  return None
示例8: CMDput_bot_config
def CMDput_bot_config(parser, args):
"""Uploads a new version of bot_config.py."""
options, args = parser.parse_args(args)
if len(args) != 1:
parser.error("Must specify file to upload")
url = options.swarming + "/_ah/api/swarming/v1/server/put_bot_config"
with open(args[0], "rb") as f:
content = f.read().decode("utf-8")
data = net.url_read_json(url, data={"content": content})
print data
return 0
示例9: CMDreproduce
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  Returns the first non-zero exit code among the task's commands, or 1 if the
  request/data could not be fetched.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify exactly one task id.')
  url = options.swarming + '/swarming/api/v1/client/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1
  if not os.path.isdir('work'):
    os.mkdir('work')
  swarming_host = urlparse.urlparse(options.swarming).netloc
  properties = request['properties']
  # Download and unpack every data bundle referenced by the task into 'work'.
  for data_url, _ in properties['data']:
    assert data_url.startswith('https://'), data_url
    data_host = urlparse.urlparse(data_url).netloc
    if data_host != swarming_host:
      # Data may live on a different host than the Swarming server itself;
      # make sure we hold credentials for it too.
      auth.ensure_logged_in('https://' + data_host)
    content = net.url_read(data_url)
    if content is None:
      print >> sys.stderr, 'Failed to download %s' % data_url
      return 1
    with zipfile.ZipFile(StringIO.StringIO(content)) as zip_file:
      zip_file.extractall('work')
  env = None
  if properties['env']:
    # Overlay the task's environment on top of the current one. Values are
    # encoded to utf-8 bytes since os.environ on Python 2 holds byte strings.
    env = os.environ.copy()
    logging.info('env: %r', properties['env'])
    env.update(
        (k.encode('utf-8'), v.encode('utf-8'))
        for k, v in properties['env'].iteritems())
  # Run all commands even if one fails, but report the first failure code.
  exit_code = 0
  for cmd in properties['commands']:
    try:
      c = subprocess.call(cmd, env=env, cwd='work')
    except OSError as e:
      print >> sys.stderr, 'Failed to run: %s' % ' '.join(cmd)
      print >> sys.stderr, str(e)
      c = 1
    if not exit_code:
      exit_code = c
  return exit_code
示例10: CMDput_bot_config
def CMDput_bot_config(parser, args):
"""Uploads a new version of bot_config.py."""
options, args = parser.parse_args(args)
if len(args) != 1:
parser.error('Must specify file to upload')
url = options.swarming + '/_ah/api/swarming/v1/server/put_bot_config'
path = unicode(os.path.abspath(args[0]))
with fs.open(path, 'rb') as f:
content = f.read().decode('utf-8')
data = net.url_read_json(url, data={'content': content})
print data
return 0
示例11: endpoints_api_discovery_apis
def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to returns metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list

  Returns a dict mapping each API id to its discovery document. Raises
  APIError if any discovery request fails.
  """
  listing = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if listing is None:
    raise APIError('Failed to discover APIs on %s' % host)
  discovered = {}
  for api in listing['items']:
    # The discovery service lists itself; skip it.
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    document = net.url_read_json(api['discoveryRestUrl'])
    if document is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    discovered[api['id']] = document
  return discovered
示例12: swarming_handshake
def swarming_handshake(swarming):
  """Initiates the connection to the Swarming server.

  Posts an empty handshake request (asking the server for an XSRF token) and
  returns the token, or None if the server could not be reached.
  """
  response = net.url_read_json(
      swarming + '/swarming/api/v1/client/handshake',
      headers={'X-XSRF-Token-Request': '1'},
      data={})
  if response:
    logging.info('Connected to server version: %s', response['server_version'])
    return response['xsrf_token']
  logging.error('Failed to handshake with server')
  return None
示例13: url_read_json
def url_read_json(self, resource, **kwargs):
  """Reads JSON from the service, refreshing the XSRF token for POSTs.

  Arguments:
    resource: Path appended to self.url.
    **kwargs: Forwarded to net.url_read_json; a 'data' kwarg makes this a POST.

  Returns:
    The decoded JSON response; may be None for a GET.

  Raises:
    Error: if a POST fails to get a response.
  """
  url = self.url + resource
  # Use identity comparison with None (PEP 8); '== None' invokes __eq__ and
  # can misbehave with objects overriding it.
  if kwargs.get('data') is None:
    # No XSRF token required for GET.
    return net.url_read_json(url, **kwargs)
  if self.need_refresh():
    self.refresh_token()
  resp = self._url_read_json_post(url, **kwargs)
  if resp is None:
    raise Error('Failed to connect to %s; %s' % (url, self.expiration))
  return resp
示例14: refresh_token
def refresh_token(self):
  """Returns a fresh token. Necessary as the token may expire after an hour.
  """
  token_url = self.url + self.token_resource
  response = net.url_read_json(
      token_url,
      headers={'X-XSRF-Token-Request': '1'},
      data=self.xsrf_request_params)
  if response is None:
    raise Error('Failed to connect to %s' % token_url)
  # Cache the token on the instance for subsequent requests.
  self.token = response['xsrf_token']
  return self.token
示例15: CMDquery
def CMDquery(parser, args):
  """Returns information about the bots connected to the Swarming server.

  Prints each machine that passes the --dead-only/--keep-dead and dimension
  filters. Returns 0 on success, 1 if the server could not be reached.
  """
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Only print dead bots, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Do not filter out dead bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)
  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead and --dead-only')
  auth.ensure_logged_in(options.swarming)
  data = net.url_read_json(options.swarming + '/swarming/api/v1/bots')
  if data is None:
    print >> sys.stderr, 'Failed to access %s' % options.swarming
    return 1
  for machine in natsort.natsorted(data['machines'], key=lambda x: x['id']):
    if options.dead_only:
      if not machine['is_dead']:
        continue
    elif not options.keep_dead and machine['is_dead']:
      continue
    # If the user requested to filter on dimensions, ensure the bot has all the
    # dimensions requested.
    dimensions = machine['dimensions']
    for key, value in options.dimensions:
      if key not in dimensions:
        break
      # A bot can have multiple value for a key, for example,
      # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
      # be accepted.
      if isinstance(dimensions[key], list):
        if value not in dimensions[key]:
          break
      else:
        if value != dimensions[key]:
          break
    else:
      # for/else: only reached when the machine matched every requested
      # dimension (no break was executed).
      print machine['id']
      if not options.bare:
        print ' %s' % json.dumps(dimensions, sort_keys=True)
  return 0