This article collects typical usage examples of aiohttp.ServerTimeoutError in Python. If you are wondering what aiohttp.ServerTimeoutError is, how to use it, or what real code that uses it looks like, the curated examples here should help. You can also explore further usage examples from the aiohttp package, where this exception class is defined.
The sections below present 7 code examples of aiohttp.ServerTimeoutError, sorted by popularity by default.
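As background for the examples, aiohttp.ServerTimeoutError is the exception aiohttp 3.x raises when a connect or sock_read timeout expires (the total timeout surfaces as asyncio.TimeoutError instead). A minimal sketch, with a placeholder URL and timeout values:

import asyncio
import aiohttp

async def fetch_with_timeout(url: str) -> str:
    # connect/sock_read timeouts are the ones that surface as ServerTimeoutError
    timeout = aiohttp.ClientTimeout(connect=5, sock_read=5)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        try:
            async with session.get(url) as resp:
                return await resp.text()
        except aiohttp.ServerTimeoutError:
            return ""

# asyncio.run(fetch_with_timeout("https://example.com"))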
Example 1: _fetch_token
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
async def _fetch_token(self, claim: str):
"""
Requests a short-term token from the DDB Auth Service given a Discord user claim in JWT form.
:param str claim: The JWT representing the Discord user.
:returns: A tuple representing the short-term token for the user and its TTL, or (None, None).
:rtype: tuple[str or None, int or None]
"""
body = {"Token": claim}
try:
async with self.http.post(AUTH_DISCORD, json=body) as resp:
if not 199 < resp.status < 300:
raise AuthException(f"Auth Service returned {resp.status}: {await resp.text()}")
try:
data = await resp.json()
except (aiohttp.ContentTypeError, ValueError, TypeError):
raise AuthException(f"Could not deserialize Auth Service response: {await resp.text()}")
except aiohttp.ServerTimeoutError:
raise AuthException("Timed out connecting to Auth Service")
return data['token'], data.get('ttl')
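For the except clause above to ever fire, the session behind self.http must be configured with a timeout. A minimal sketch of such a setup (the function name and timeout values are assumptions, not taken from the original project):

import aiohttp

async def build_auth_http_session() -> aiohttp.ClientSession:
    # Hypothetical setup for the session used as self.http above.
    timeout = aiohttp.ClientTimeout(total=10, connect=5, sock_read=5)
    return aiohttp.ClientSession(timeout=timeout)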
Example 2: test_get_http_utilization_for_all_tasks_timeout
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
def test_get_http_utilization_for_all_tasks_timeout():
fake_marathon_tasks = [
mock.Mock(id="fake-service.fake-instance", host="fake_host", ports=[30101])
]
mock_json_mapper = mock.Mock(side_effect=aiohttp.ServerTimeoutError)
with asynctest.patch(
"paasta_tools.autoscaling.autoscaling_service_lib.get_json_body_from_service",
autospec=True,
):
assert (
autoscaling_service_lib.get_http_utilization_for_all_tasks(
marathon_service_config=mock.Mock(),
marathon_tasks=fake_marathon_tasks,
endpoint="fake-endpoint",
json_mapper=mock_json_mapper,
)
== 1.0
)
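The test relies on mock.Mock(side_effect=aiohttp.ServerTimeoutError) raising the exception as soon as the mapper is called. A standalone illustration of that behaviour (not part of the original test suite):

import aiohttp
from unittest import mock

mapper = mock.Mock(side_effect=aiohttp.ServerTimeoutError)
try:
    mapper({"some": "json"})
except aiohttp.ServerTimeoutError:
    print("mapper raised the configured timeout")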
Example 3: test_exceptions
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
def test_exceptions(self):
import aiohttp
exceptions = aiohttp_.AiohttpClient.exceptions
with pytest.raises(exceptions.BaseClientException):
raise aiohttp.ClientError()
with pytest.raises(exceptions.BaseClientException):
# Test polymorphism
raise aiohttp.InvalidURL("invalid")
with pytest.raises(exceptions.ConnectionError):
raise aiohttp.ClientConnectionError()
with pytest.raises(exceptions.ConnectionTimeout):
raise aiohttp.ClientConnectorError.__new__(
aiohttp.ClientConnectorError
)
with pytest.raises(exceptions.ServerTimeout):
raise aiohttp.ServerTimeoutError()
with pytest.raises(exceptions.SSLError):
raise aiohttp.ClientSSLError.__new__(aiohttp.ClientSSLError)
with pytest.raises(exceptions.InvalidURL):
raise aiohttp.InvalidURL("invalid")
Example 4: get_http_utilization_for_a_task
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
async def get_http_utilization_for_a_task(
task, service, instance, endpoint, json_mapper, session
):
"""
Gets the task utilization by fetching json from an http endpoint
and applying a function that maps it to a utilization.
:param task: the Marathon task to get data from
:param service: service name
:param endpoint: the http endpoint to get the task stats from
:param json_mapper: a function that takes a dictionary for a task and returns that task's utilization
:returns: the service's utilization, from 0 to 1, or None
"""
try:
return json_mapper(
await get_json_body_from_service(
host=task.host, port=task.ports[0], endpoint=endpoint, session=session
)
)
except aiohttp.ServerTimeoutError:
# If we time out querying an endpoint, assume the task is fully loaded
# This won't trigger in the event of DNS error or when a request is refused
log.error(
f"Received a timeout when querying {service}.{instance} on {task.host}:{task.ports[0]}. Assuming the service is at full utilization."
)
return 1.0
except Exception as e:
log.error(
f"Caught exception when querying {service}.{instance} on {task.host}:{task.ports[0]} : {str(e)}"
)
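A json_mapper for the function above just turns one task's decoded JSON into a number between 0 and 1. A hypothetical mapper (the key names are assumptions for illustration only):

def busy_worker_fraction(stats: dict) -> float:
    # Hypothetical stats payload: {"workers": [{"status": "busy"}, {"status": "idle"}]}
    workers = stats.get("workers", [])
    if not workers:
        return 0.0
    busy = sum(1 for worker in workers if worker.get("status") == "busy")
    return busy / len(workers)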
Example 5: run
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
async def run(self, wait: int = DEFAULT_WAIT):
""" Run user polling forever
Can be manually stopped with:
>> user.stop()
"""
self.wait = wait
logger.info("Polling will be started. Is it OK?")
await self.get_server()
await self.on.dispatch()
self.middleware.add_middleware(self.on.pre_p)
self.status.dispatched = True
logger.debug("User Polling successfully started")
while not self._stop:
try:
event = await self.make_long_request(self.long_poll_server)
if isinstance(event, dict) and event.get("ts"):
self.loop.create_task(self.emulate(event))
self.long_poll_server["ts"] = event["ts"]
else:
await self.get_server()
except (
aiohttp.ClientConnectionError,
aiohttp.ServerTimeoutError,
TimeoutError,
):
# No internet connection
logger.warning("Server Timeout Error!")
except:
logger.error(
"While user lp was running error occurred \n\n{}".format(
traceback.format_exc()
)
)
logger.error("Polling was stopped")
Example 6: read
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
async def read(self, url, *args, use_proxy=False, **kwargs) -> FeedResponse:
headers = content = None
try:
if use_proxy:
headers, content, url, status = await self._read_by_proxy(url, *args, **kwargs)
else:
headers, content, url, status = await self._read(url, *args, **kwargs)
except (socket.gaierror, aiodns.error.DNSError):
status = FeedResponseStatus.DNS_ERROR.value
except (socket.timeout, TimeoutError, aiohttp.ServerTimeoutError,
asyncio.TimeoutError, concurrent.futures.TimeoutError):
status = FeedResponseStatus.CONNECTION_TIMEOUT.value
except (ssl.SSLError, ssl.CertificateError,
aiohttp.ServerFingerprintMismatch,
aiohttp.ClientSSLError,
aiohttp.ClientConnectorSSLError,
aiohttp.ClientConnectorCertificateError):
status = FeedResponseStatus.SSL_ERROR.value
except (aiohttp.ClientProxyConnectionError,
aiohttp.ClientHttpProxyError):
status = FeedResponseStatus.PROXY_ERROR.value
except (ConnectionError,
aiohttp.ServerDisconnectedError,
aiohttp.ServerConnectionError,
aiohttp.ClientConnectionError,
aiohttp.ClientConnectorError):
status = FeedResponseStatus.CONNECTION_RESET.value
except (aiohttp.WSServerHandshakeError, aiohttp.ClientOSError):
status = FeedResponseStatus.CONNECTION_ERROR.value
except aiohttp.ClientPayloadError:
status = FeedResponseStatus.CHUNKED_ENCODING_ERROR.value
except UnicodeDecodeError:
status = FeedResponseStatus.CONTENT_DECODING_ERROR.value
except FeedReaderError as ex:
status = ex.status
LOG.warning(type(ex).__name__ + " url=%s %s", url, ex)
except (aiohttp.ClientResponseError, aiohttp.ContentTypeError) as ex:
status = ex.status
except (aiohttp.ClientError, aiohttp.InvalidURL):
status = FeedResponseStatus.UNKNOWN_ERROR.value
builder = FeedResponseBuilder(use_proxy=use_proxy)
builder.url(url)
builder.status(status)
builder.content(content)
builder.headers(headers)
return builder.build()
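One detail worth noting in read(): aiohttp.ServerTimeoutError is a subclass of aiohttp.ServerConnectionError (and ultimately of aiohttp.ClientError), so the narrower timeout clause has to appear before the broader connection and client-error clauses, exactly as it does above. A small standalone demonstration:

import aiohttp

try:
    raise aiohttp.ServerTimeoutError("read timeout")
except aiohttp.ServerTimeoutError:
    handled = "timeout"       # reached: the most specific clause wins
except aiohttp.ServerConnectionError:
    handled = "connection"    # would swallow the timeout if it came first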
Example 7: retrieve_certificates
# Required module: import aiohttp [as alias]
# Or: from aiohttp import ServerTimeoutError [as alias]
async def retrieve_certificates(loop, url=None, ctl_offset=0, output_directory='/tmp/', concurrency_count=DOWNLOAD_CONCURRENCY):
async with aiohttp.ClientSession(loop=loop, conn_timeout=10) as session:
ctl_logs = await certlib.retrieve_all_ctls(session)
if url:
url = url.strip("'")
for log in ctl_logs:
if url and url not in log['url']:
continue
work_deque = deque()
download_results_queue = asyncio.Queue(maxsize=MAX_QUEUE_SIZE)
logging.info("Downloading certificates for {}".format(log['description']))
try:
log_info = await certlib.retrieve_log_info(log, session)
except (aiohttp.ClientConnectorError, aiohttp.ServerTimeoutError, aiohttp.ClientOSError, aiohttp.ClientResponseError) as e:
logging.error("Failed to connect to CTL! -> {} - skipping.".format(e))
continue
try:
await certlib.populate_work(work_deque, log_info, start=ctl_offset)
except Exception as e:
logging.error("Log needs no update - {}".format(e))
continue
download_tasks = asyncio.gather(*[
download_worker(session, log_info, work_deque, download_results_queue)
for _ in range(concurrency_count)
])
processing_task = asyncio.ensure_future(processing_coro(download_results_queue, output_dir=output_directory))
queue_monitor_task = asyncio.ensure_future(queue_monitor(log_info, work_deque, download_results_queue))
asyncio.ensure_future(download_tasks)
await download_tasks
await download_results_queue.put(None) # Downloads are done, processing can stop
await processing_task
queue_monitor_task.cancel()
logging.info("Completed {}, stored at {}!".format(
log_info['description'],
'/tmp/{}.csv'.format(log_info['url'].replace('/', '_'))
))
logging.info("Finished downloading and processing {}".format(log_info['url']))