This article collects typical usage examples of the readBody function from the Python module twisted.web.client. If you are unsure what readBody does, how to call it, or what real-world uses look like, the curated examples below should help.
Fifteen code examples of readBody are shown, sorted by popularity by default.
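Before the project-specific examples, here is a minimal, self-contained sketch of the usual pattern (not taken from any of the projects below): Agent.request returns a Deferred that fires with a response object once the headers arrive, and readBody then collects the entire body into a single bytes object. The URL is only a placeholder.

from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.web.client import Agent, readBody

@inlineCallbacks
def fetch(url):
    # Agent.request fires its Deferred as soon as the response headers arrive.
    agent = Agent(reactor)
    response = yield agent.request(b'GET', url)
    # readBody buffers the entire response body and fires with bytes.
    body = yield readBody(response)
    print(response.code, len(body))

if __name__ == '__main__':
    d = fetch(b'http://example.com/')
    d.addErrback(lambda failure: failure.printTraceback())
    d.addBoth(lambda _: reactor.stop())
    reactor.run()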
Example 1: obtain_auth_token
def obtain_auth_token(self):
    """Return a valid Microsoft Cognitive Services authentication
    token, obtained with the current subscription key if necessary.
    """
    start_time = time.time()
    if self.auth_token is None or self.token_expiry < start_time:
        headers = Headers()
        headers.addRawHeader('Ocp-Apim-Subscription-Key',
                             self.subscription_key)
        headers.addRawHeader('Content-Length', '0')
        response = yield self.agent.request(
            'POST', AUTH_URL, headers=headers)
        if response.code != 200:
            data = yield readBody(response)
            self.log.error(
                'Could not authenticate to Microsoft Cognitive '
                'Services: {data}', data=data)
            raise UserVisibleError(
                'Could not authenticate to Microsoft Cognitive '
                'Services. Try again later.')
        # Coerce the access token to a byte string to avoid problems
        # inside Twisted's header handling code down the line.
        self.auth_token = (
            (yield readBody(response)).strip().decode('ascii'))
        self.token_expiry = start_time + AUTH_TOKEN_TTL
    returnValue(self.auth_token)
Example 2: invalid_version_response
def invalid_version_response(agent, request):
    """
    A fixture that performs a bad handshake with a prohibited WebSocket version.
    """
    response = pytest.blockon(make_request(agent, version=request.param))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
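A note on the teardown idiom used in this fixture and the ones that follow: the Deferred returned by readBody supports cancellation, and cancelling it aborts the underlying connection (via the transport's abortConnection, where available). That is why client.readBody(response).cancel() is used to drop the connection without ever reading the body.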
Example 3: _createCommitProvisionCB
def _createCommitProvisionCB(self, agent, conn_url, header):
    c_resp = yield agent.request('GET', conn_url, header)
    body = yield readBody(c_resp)
    c_info = json.loads(body)
    self.failUnlessEquals(c_info['reservation_state'], 'ReserveHeld', 'State did not transit to held after creation')

    status_url = conn_url + '/status'

    # commit
    producer2 = FileBodyProducer(StringIO('commit'))
    resp2 = yield agent.request('POST', status_url, header, producer2)
    self.failUnlessEqual(resp2.code, 200, 'Service did not return OK after commit')

    # should do new call here..
    c_resp = yield agent.request('GET', conn_url, header)
    body = yield readBody(c_resp)
    c_info2 = json.loads(body)
    self.failUnlessEquals(c_info2['reservation_state'], 'ReserveStart', 'State did not transit after commit')

    # provision
    producer3 = FileBodyProducer(StringIO('provision'))
    resp3 = yield agent.request('POST', status_url, header, producer3)
    self.failUnlessEqual(resp3.code, 200, 'Service did not return OK after provision')

    # give the provider a bit of time to switch
    yield task.deferLater(reactor, 0.1, self._createCommitProvisionCB2, agent, conn_url, header)
Example 4: test_untrusted_Origins_are_not_allowed_with_OriginCheck_Trusted
def test_untrusted_Origins_are_not_allowed_with_OriginCheck_Trusted(agent):
    # When using WebSocketOriginCheck Trusted, even a same-origin request isn't
    # good enough if the origin is not on the whitelist.
    response = yield make_request(agent, path='/origin-whitelist',
                                  origin=make_root())
    assert response.code == 403
    client.readBody(response).cancel()  # immediately close the connection
Example 5: bad_protocol_response
def bad_protocol_response(agent, request):
    """
    A fixture that performs a bad handshake with an invalid
    Sec-WebSocket-Protocol header.
    """
    response = pytest.blockon(make_request(agent, protocol=request.param))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
Example 6: trusted_origin_response
def trusted_origin_response(agent, request):
    """
    A fixture that performs a handshake using one of the explicitly trusted test
    Origins.
    """
    response = pytest.blockon(make_request(agent, path='/origin-whitelist',
                                           origin=request.param))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
Example 7: increment_response
def increment_response(agent, request):
    """
    A fixture that connects to the dumb-increment plugin with the given
    subprotocol list.
    """
    response = pytest.blockon(make_request(agent, path='/dumb-increment',
                                           protocol=request.param))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
Example 8: test_cpu_load_does_not_spike_when_idle
def test_cpu_load_does_not_spike_when_idle(agent):
    """
    A regression test for issue #9 (railed CPU when a WebSocket connection is
    open but idle).
    """
    response = yield make_request(agent)
    try:
        # Now that the connection is open, see if any CPUs are in trouble.
        assert not any_cpus_railed()
    finally:
        client.readBody(response).cancel()  # close the connection
Example 9: good_origin_response
def good_origin_response(agent, request):
    """
    A fixture that performs a handshake with an Origin that matches the server.
    """
    host = make_authority(host=request.param[0])
    origin = make_root(host=request.param[0])
    version = request.param[1]
    response = pytest.blockon(make_request(agent, origin=origin, host=host,
                                           version=version))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
Example 10: bad_origin_response
def bad_origin_response(agent, request):
    """
    A fixture that performs a good handshake, but with an Origin that does not
    match the server.
    """
    origin = request.param[0]
    host = request.param[1]
    version = request.param[2]
    response = pytest.blockon(make_request(agent, origin=origin, host=host,
                                           version=version))
    yield response
    client.readBody(response).cancel()  # immediately close the connection
Example 11: _make_request
def _make_request(self, method, **params):
    headers = Headers({
        "User-Agent": ["Harold ([email protected])"],
        "Content-Type": ["application/x-www-form-urlencoded"],
    })

    body_data = {"token": self._token}
    body_data.update(params)
    body_producer = FormEncodedBodyProducer(body_data)

    agent = Agent(reactor, pool=self._pool)
    response = yield agent.request(
        "POST",
        "https://slack.com/api/" + method,
        headers,
        body_producer,
    )

    body = yield readBody(response)
    data = json.loads(body)

    if response.code == 429:
        retry_after = int(response.headers.getRawHeaders("Retry-After")[0])
        raise SlackWebClientRatelimitedError(retry_after)

    if not data["ok"]:
        raise SlackWebClientResponseError(data["error"], data)

    warnings = data.get("warnings")
    if warnings:
        # TODO: use real logger
        print("WARNING FROM SLACK: %s" % warnings)

    returnValue(data)
Example 12: get_body
def get_body(result):
    # Now that we have the response, fetch its body with readBody,
    # which is also an asynchronous operation.
    d2 = readBody(result)  # get the contents of the page.
    d2.addCallback(return_body)
    d2.addErrback(fail)
Example 13: handler_request
def handler_request(self, response, requestProcess, bodyProcess):
    request_func, request_args, request_kw = requestProcess
    body_func, body_args, body_kw = bodyProcess

    rs = request_func(response, *request_args, **request_kw)

    d = readBody(response)
    d.addCallback(body_func, *body_args, **body_kw)
    return d
Example 14: post_urlencoded_get_raw
def post_urlencoded_get_raw(self, destination, path, accept_partial=False,
                            args={}):
    query_bytes = urllib.urlencode(args, True)

    def body_callback(method, url_bytes, headers_dict):
        return FileBodyProducer(StringIO(query_bytes))

    response = yield self._create_request(
        destination.encode("ascii"),
        "POST",
        path.encode("ascii"),
        body_callback=body_callback,
        headers_dict={
            "Content-Type": ["application/x-www-form-urlencoded"]
        }
    )

    try:
        body = yield readBody(response)
        defer.returnValue(body)
    except PartialDownloadError as e:
        if accept_partial:
            defer.returnValue(e.response)
        else:
            raise e
Example 15: crawl_job_url
def crawl_job_url(delegator_svc, job_id, url, depth):
    """
    Crawl a URL for images. Record any images that we found under the job's
    record in our job store (Redis). If we encounter valid <a href> tags,
    fire off additional crawling announcements for the worker pool to
    tear into together, rather than trying to do it all here.

    :param str job_id: The crawling job's UUID4 string.
    :param str url: The URL to crawl.
    :param int depth: The depth of this crawling job. If it's 0, this is the
        top-level crawl in the job.
    """
    # Abstraction over Twisted's HTTP client. We'll follow redirs, validate
    # SSL certificates, and try to work for most cases.
    response = yield visit_url(url, follow_redirs=True)
    if response.code != 200:
        log.err("URL %s failed with non-200 HTTP code: %d" % (url, response.code))
        returnValue(None)

    headers = get_response_headers(response)

    # If this were a production environment, we'd probably want to try to
    # figure out chunked response body parsing. We could end up with some
    # huge body sizes as-is.
    body = yield readBody(response)

    # Look through the response's body for possible images and other links.
    image_urls, links_to_crawl = parse_response(url, headers, body)
    yield record_images_for_url(job_id, url, image_urls)

    # Rather than try to follow the links in the current invocation, hand
    # these off so the work may be distributed across the pool.
    if links_to_crawl and depth < MAX_CRAWL_DEPTH:
        enqueue_crawling_job(delegator_svc, job_id, links_to_crawl, depth=depth + 1)
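The comment in Example 15 about huge body sizes points at a real limitation: readBody buffers the entire body in memory before its Deferred fires. Below is a minimal sketch (not part of Twisted or of the project above; read_body_capped and _CappedBodyCollector are illustrative names) of a size-capped alternative built on IResponse.deliverBody and a custom protocol.

from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.client import ResponseDone
from twisted.web.http import PotentialDataLoss


class _CappedBodyCollector(Protocol):
    """Collect a response body, failing once it exceeds max_bytes."""

    def __init__(self, finished, max_bytes):
        self._finished = finished
        self._max_bytes = max_bytes
        self._chunks = []
        self._received = 0

    def dataReceived(self, data):
        self._received += len(data)
        if self._received <= self._max_bytes:
            self._chunks.append(data)
        # A production version would also abort the transfer here rather
        # than silently discarding the remaining data.

    def connectionLost(self, reason):
        if self._received > self._max_bytes:
            self._finished.errback(ValueError(
                'response body exceeded %d bytes' % self._max_bytes))
        elif reason.check(ResponseDone, PotentialDataLoss):
            # PotentialDataLoss is how HTTP/1.0 responses without a
            # Content-Length finish; treat it as success, like readBody does.
            self._finished.callback(b''.join(self._chunks))
        else:
            self._finished.errback(reason)


def read_body_capped(response, max_bytes=10 * 1024 * 1024):
    """Like readBody(), but refuse bodies larger than max_bytes."""
    finished = Deferred()
    response.deliverBody(_CappedBodyCollector(finished, max_bytes))
    return finished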