This article collects typical usage examples of CacheControl.proxies in Python — in practice, the proxies attribute of the requests session wrapped by cachecontrol.CacheControl. If you are unsure what CacheControl.proxies is for or how to use it, the hand-picked code examples below may help. You can also explore further usage examples of the enclosing class, cachecontrol.CacheControl.
The section below shows 4 code examples of CacheControl.proxies, sorted by popularity by default.
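Before the examples, here is a minimal, self-contained sketch of the pattern they all share: wrap a requests session with CacheControl and a FileCache, then assign a proxies mapping on the wrapped session. It is not taken from any of the projects below; the proxy URL and cache directory are placeholders.

import os
import requests
from cachecontrol import CacheControl
from cachecontrol import caches

# wrap a plain requests session so responses are cached on disk
plain_session = requests.Session()
cached_session = CacheControl(plain_session, cache=caches.FileCache(os.path.join(".web_cache", "sessions")))

# proxies is an ordinary requests.Session attribute, reachable through the wrapped session;
# the proxy URL below is a placeholder, not a real endpoint
cached_session.proxies = {
    "http": "http://127.0.0.1:8080",
    "https": "http://127.0.0.1:8080",
}

resp = cached_session.get("https://example.com/")
print(resp.status_code)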
Example 1: getURL
# Required import: from cachecontrol import CacheControl [as alias]
# Or: from cachecontrol.CacheControl import proxies [as alias]
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """
    # request session, backed by an on-disk cache
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, "sessions")))
    # request session headers
    req_headers = {"User-Agent": USER_AGENT, "Accept-Encoding": "gzip,deflate"}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)
    # request session ssl verify
    session.verify = False
    # request session parameters
    session.params = params
    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {"http": sickbeard.PROXY_SETTING, "https": sickbeard.PROXY_SETTING}
        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)
        if not resp.ok:
            logger.log(
                u"Requested url "
                + url
                + " returned status code is "
                + str(resp.status_code)
                + ": "
                + clients.http_error_code[resp.status_code],
                logger.DEBUG,
            )
            return
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
Example 2: getURL
# Required import: from cachecontrol import CacheControl [as alias]
# Or: from cachecontrol.CacheControl import proxies [as alias]
def getURL(url, post_data=None, params=None, headers={}, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """
    # request session, backed by an on-disk cache
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
    session.headers.update(headers)
    # request session ssl verify
    session.verify = False
    # request session parameters
    session.params = params
    try:
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }
        # decide if we get or post data to server
        if post_data:
            resp = session.post(url, data=post_data, timeout=timeout)
        else:
            resp = session.get(url, timeout=timeout)
        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code is " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
Example 3: getURL
# Required import: from cachecontrol import CacheControl [as alias]
# Or: from cachecontrol.CacheControl import proxies [as alias]
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """
    # request session, backed by an on-disk cache
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
    # request session headers
    req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)
    # request session ssl verify
    session.verify = False
    # request session parameters
    session.params = params
    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)
        # request session proxies
        if sickbeard.PROXY_SETTING:
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            session.proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }
        # note: always a GET here; post_data (if any) is sent as the request body
        resp = session.get(url, data=post_data, timeout=timeout)
    except requests.exceptions.HTTPError as e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return
Example 4: download_file
# Required import: from cachecontrol import CacheControl [as alias]
# Or: from cachecontrol.CacheControl import proxies [as alias]
def download_file(url, filename, session=None):
    # create session, backed by an on-disk cache
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
    # request session ssl verify
    session.verify = False
    # request session streaming
    session.stream = True
    # request session proxies
    if sickbeard.PROXY_SETTING:
        logger.log("Using proxy for url: " + url, logger.DEBUG)
        session.proxies = {
            "http": sickbeard.PROXY_SETTING,
            "https": sickbeard.PROXY_SETTING,
        }
    try:
        resp = session.get(url)
        if not resp.ok:
            return False
        # stream the response body to disk in 1 KiB chunks
        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()
        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False
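For comparison, here is a stripped-down sketch of the same streaming-download pattern without the SickBeard-specific helpers (sickbeard.PROXY_SETTING, logger, chmodAsParent, _remove_file_failed). The proxy argument and cache directory are placeholders, and error handling is reduced to discarding the partial file; this is an illustrative sketch, not the project's implementation.

import os
import requests
from cachecontrol import CacheControl
from cachecontrol import caches

def download_file_sketch(url, filename, proxy=None, cache_dir=".web_cache"):
    # cached, streaming session; proxy stands in for sickbeard.PROXY_SETTING
    session = CacheControl(requests.Session(), cache=caches.FileCache(os.path.join(cache_dir, "sessions")))
    session.stream = True
    if proxy:
        session.proxies = {"http": proxy, "https": proxy}
    try:
        resp = session.get(url, timeout=30)
        if not resp.ok:
            return False
        # stream the response body to disk in 1 KiB chunks
        with open(filename, "wb") as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
        return True
    except requests.exceptions.RequestException:
        # mirror the original cleanup: discard the partial file on failure
        if os.path.isfile(filename):
            os.remove(filename)
        return False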