This article collects typical usage examples of aiohttp.client_exceptions in Python. aiohttp.client_exceptions is the submodule that holds aiohttp's client-side exception classes; if you are unsure what it is for or how to use it, the hand-picked code samples below may help. You can also explore further usage examples of the aiohttp package it belongs to.
Four code examples using aiohttp.client_exceptions are shown below, ordered by popularity by default.
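Before the numbered examples, here is a minimal sketch of the common pattern: catch the specific exception classes from aiohttp.client_exceptions that you want to handle, retry where it makes sense, and let the rest propagate. The helper name, the retry count, and the example URL are illustrative assumptions, not taken from any of the projects quoted below.

import asyncio

import aiohttp
from aiohttp import client_exceptions


async def fetch_with_retry(url: str, attempts: int = 3) -> bytes:
    """Fetch a URL, retrying when the server drops the connection."""
    async with aiohttp.ClientSession() as session:
        for _ in range(attempts):
            try:
                async with session.get(url) as res:
                    return await res.read()
            except client_exceptions.ServerDisconnectedError:
                # The server closed the connection mid-request; back off and retry.
                await asyncio.sleep(0.1)
            except client_exceptions.ClientConnectorError as e:
                # DNS or TCP-level failures are usually not worth retrying.
                raise RuntimeError(f'cannot connect to {url}') from e
    raise RuntimeError(f'{url} kept disconnecting after {attempts} attempts')


# Usage (illustrative): asyncio.run(fetch_with_retry('https://example.com'))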
Example 1: get_cat_image_url
# Required import: import aiohttp
# Or: from aiohttp import client_exceptions
# This snippet also needs: import asyncio, import async_timeout, an
# ElementTree-compatible `etree` parser, and the project's own APIServerError
# exception, all of which come from the surrounding module.
async def get_cat_image_url(timeout: float) -> str:
    api_url = 'http://thecatapi.com/api/images/get'
    async with aiohttp.ClientSession() as session:
        while True:
            # Ask the API for a random cat image URL, retrying when the
            # server drops the connection.
            try:
                async with session.get(
                    api_url, params={'format': 'xml', 'type': 'jpg,png'}
                ) as res:
                    if res.status != 200:
                        raise APIServerError
                    xml_result = await res.read()
                    tree = etree.fromstring(xml_result)
                    url = tree.find('data/images/image/url').text
            except aiohttp.client_exceptions.ServerDisconnectedError:
                await asyncio.sleep(0.1)
                continue
            # Verify that the image URL actually responds before returning it;
            # on connection errors or timeout, fetch a new candidate.
            try:
                async with async_timeout.timeout(timeout=timeout):
                    async with session.get(url) as res:
                        async with res:
                            if res.status == 200:
                                return url
            except (aiohttp.ClientConnectorError, asyncio.TimeoutError):
                continue
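A coroutine like this has to be driven from an event loop. A minimal, hypothetical call site (the surrounding main function and the timeout value are assumptions, not part of the quoted project) could look like:

import asyncio

async def main() -> None:
    # get_cat_image_url is the coroutine from Example 1 above.
    url = await get_cat_image_url(timeout=5.0)
    print(url)

asyncio.run(main())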
Example 2: get_dog_image_url
# Required import: import aiohttp
# Or: from aiohttp import client_exceptions
# This snippet also needs: import asyncio, import async_timeout, import json,
# and the project's own APIServerError exception.
async def get_dog_image_url(timeout: float) -> str:
    api_url = 'https://dog.ceo/api/breeds/image/random'
    async with aiohttp.ClientSession() as session:
        while True:
            # Ask the API for a random dog image URL, retrying when the
            # server drops the connection.
            try:
                async with session.get(api_url) as res:
                    if res.status != 200:
                        raise APIServerError
                    data = await res.json(loads=json.loads)
                    url = data['message']
            except aiohttp.client_exceptions.ServerDisconnectedError:
                await asyncio.sleep(0.1)
                continue
            # Verify that the image URL actually responds before returning it;
            # on connection errors or timeout, fetch a new candidate.
            try:
                async with async_timeout.timeout(timeout=timeout):
                    async with session.get(url) as res:
                        async with res:
                            if res.status == 200:
                                return url
            except (aiohttp.ClientConnectorError, asyncio.TimeoutError):
                continue
Example 3: add
# Required import: import aiohttp
# Or: from aiohttp import client_exceptions
# This snippet also needs: import feedparser, import dateutil.parser, and the
# project's own Message, RSSFeedURL, and UTC names. The user-facing strings
# are Korean error/confirmation messages sent back to the chat channel.
async def add(self, bot, event: Message, sess, url: str):
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url) as res:
                data: bytes = await res.read()
        except aiohttp.client_exceptions.InvalidURL:
            # "`{url}` is not a valid URL!"
            await bot.say(event.channel, f'`{url}`은 올바른 URL이 아니에요!')
            return
        except aiohttp.client_exceptions.ClientConnectorError:
            # "`{url}` cannot be reached!"
            await bot.say(event.channel, f'`{url}`에 접속할 수 없어요!')
            return
    if not data:
        # "`{url}` is an empty web page!"
        await bot.say(event.channel, f'`{url}`은 빈 웹페이지에요!')
        return
    f = feedparser.parse(data)
    if f.bozo != 0:
        # "`{url}` is not a valid RSS document!"
        await bot.say(event.channel, f'`{url}`은 올바른 RSS 문서가 아니에요!')
        return
    feed = RSSFeedURL()
    feed.channel = event.channel.id
    feed.url = url
    # Remember the newest published timestamp so that only newer entries get
    # announced later.
    feed.updated_at = max(
        [
            dateutil.parser.parse(entry.published).astimezone(UTC)
            for entry in f.entries
        ]
    )
    with sess.begin():
        sess.add(feed)
    # "Started subscribing to `{url}` in this channel!"
    await bot.say(
        event.channel, f'<#{event.channel.id}> 채널에서 `{url}`을 구독하기 시작했어요!'
    )
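The updated_at computation above reduces to "the newest published timestamp in the feed, normalized to UTC". A standalone illustration of that step, using datetime.timezone.utc in place of the project's UTC object and a made-up two-entry feed:

import dateutil.parser
import feedparser
from datetime import timezone

SAMPLE_FEED = """<?xml version="1.0"?>
<rss version="2.0"><channel><title>sample</title>
<item><title>old</title><pubDate>Mon, 06 Sep 2021 10:00:00 +0000</pubDate></item>
<item><title>new</title><pubDate>Mon, 06 Sep 2021 16:45:00 +0000</pubDate></item>
</channel></rss>"""

f = feedparser.parse(SAMPLE_FEED)
assert f.bozo == 0  # well-formed feed

# feedparser normalizes each item's pubDate to entry.published.
latest = max(
    dateutil.parser.parse(entry.published).astimezone(timezone.utc)
    for entry in f.entries
)
print(latest)  # 2021-09-06 16:45:00+00:00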
Example 4: handler_proxy
# Required import: import aiohttp
# Or: from aiohttp import client_exceptions
# This snippet also needs: import asyncio, from aiohttp import web, and
# from http import HTTPStatus. `client` appears to be aiohttp's client module
# (from aiohttp import client), and self.logger, self.loop, self.subdomain,
# self.get_upstream and self.wsforward are provided by the surrounding proxy
# class.
async def handler_proxy(self, req):
    headers = req.headers.copy()
    self.logger.debug("headers: %s", headers)
    subdomain = self.subdomain(headers)
    path = req.path
    self.logger.debug("subdomain: %r, path: %r", subdomain, path)
    upstream = self.get_upstream(subdomain, path)
    if not upstream:
        return web.Response(status=HTTPStatus.NOT_FOUND)
    if (
        headers.get("connection", "").lower() == "upgrade"
        and headers.get("upgrade", "").lower() == "websocket"
        and req.method == "GET"
    ):
        # Handle websocket proxy
        try:
            async with aiohttp.ClientSession(
                cookies=req.cookies
            ) as client_session:
                async with client_session.ws_connect(
                    upstream
                ) as ws_client:
                    ws_server = web.WebSocketResponse()
                    await ws_server.prepare(req)
                    # Pump frames in both directions until one side closes.
                    self.loop.create_task(
                        asyncio.wait(
                            [
                                self.wsforward(ws_server, ws_client),
                                self.wsforward(ws_client, ws_server),
                            ],
                            return_when=asyncio.FIRST_COMPLETED,
                        )
                    )
                    return ws_server
        except aiohttp.client_exceptions.WSServerHandshakeError:
            return web.Response(status=HTTPStatus.NOT_FOUND)
    else:
        # Handle regular HTTP request proxy
        self.logger.debug(
            "upstream for (%r): %s", upstream, (subdomain, path)
        )
        async with client.request(
            req.method,
            upstream,
            headers=headers,
            allow_redirects=False,
            data=await req.read(),
        ) as res:
            self.logger.debug(
                "upstream url(%s) status: %d", upstream, res.status
            )
            return web.Response(
                headers=res.headers,
                status=res.status,
                body=await res.read(),
            )
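Example 4 is a method on a larger proxy class; for context, a handler like this is usually wired into an aiohttp application through a catch-all route. The make_app helper, the route pattern, and the port below are assumptions for illustration, not taken from the source project:

from aiohttp import web

def make_app(proxy) -> web.Application:
    # `proxy` is assumed to be an instance of the class that defines
    # handler_proxy above; the catch-all route sends every request to it.
    app = web.Application()
    app.router.add_route('*', '/{tail:.*}', proxy.handler_proxy)
    return app

# Usage (illustrative): web.run_app(make_app(proxy), port=8080)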