This article collects typical usage examples of the urllib2.build_opener method in Python. If you have been wondering what urllib2.build_opener actually does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples from the urllib2 module to which the method belongs.
The following shows 15 code examples of urllib2.build_opener, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
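Before the individual examples, here is a minimal, self-contained sketch of the pattern they all build on (Python 2 standard library only; http://example.com/ is a placeholder URL, not taken from any example below): build_opener chains handler objects such as cookie processors, proxy handlers, or auth handlers into an OpenerDirector, which can either be used directly or installed globally with install_opener so that plain urllib2.urlopen picks it up.

import cookielib
import urllib2

# Chain handlers into one opener; here a cookie processor, so cookies
# set by the server are sent back on later requests made by this opener.
cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))

# Option 1: use the opener directly.
response = opener.open('http://example.com/')  # placeholder URL
body = response.read()

# Option 2: install it globally so plain urllib2.urlopen() uses it too.
urllib2.install_opener(opener)
body = urllib2.urlopen('http://example.com/').read()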
Example 1: retrieve_status_page
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def retrieve_status_page(user, password, url):
    # Opt out of HTTPS certificate verification when the Python build
    # supports it (2.7.9+), so self-signed certificates are accepted.
    try:
        ssl._create_unverified_context
    except AttributeError:
        pass
    else:
        ssl._create_default_https_context = ssl._create_unverified_context

    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, url, user, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)

    req = urllib2.Request(url)
    try:
        response = urllib2.urlopen(req)
        return response.read()
    except Exception:
        raise CrawlError("can't access to http://%s", url)
Example 2: reverseip
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def reverseip(url):
    """Return domains hosted on the same server as the given URL."""
    # get only the domain name
    url = urlparse(url).netloc if urlparse(url).netloc != '' else urlparse(url).path.split("/")[0]
    source = "http://domains.yougetsignal.com/domains.php"
    useragent = useragents.get()
    contenttype = "application/x-www-form-urlencoded; charset=UTF-8"
    # POST method
    opener = urllib2.build_opener(
        urllib2.HTTPHandler(), urllib2.HTTPSHandler())
    data = urllib.urlencode([('remoteAddress', url), ('key', '')])
    request = urllib2.Request(source, data)
    request.add_header("Content-type", contenttype)
    request.add_header("User-Agent", useragent)
    try:
        result = urllib2.urlopen(request).read()
    except urllib2.HTTPError, e:
        print >> sys.stderr, "[{}] HTTP error".format(e.code)
Example 3: probe_html5
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def probe_html5(self, result):

    class NoRedirectHandler(urllib2.HTTPRedirectHandler):
        def http_error_302(self, req, fp, code, msg, headers):
            infourl = urllib.addinfourl(fp, headers, req.get_full_url())
            infourl.status = code
            infourl.code = code
            return infourl
        http_error_300 = http_error_302
        http_error_301 = http_error_302
        http_error_303 = http_error_302
        http_error_307 = http_error_302

    opener = urllib2.build_opener(NoRedirectHandler())
    urllib2.install_opener(opener)
    r = urllib2.urlopen(urllib2.Request(result['url'], headers=result['headers']))
    if r.code == 200:
        result['url'] = r.read()
    return result
Example 4: __init__
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def __init__(
    self,
    host,
    port=8069,
    timeout=120,
    version=None,
    deserialize=True,
    opener=None,
):
    super(ConnectorJSONRPC, self).__init__(host, port, timeout, version)
    self.deserialize = deserialize
    # One URL opener (with cookies handling) shared between
    # JSON and HTTP requests
    if opener is None:
        cookie_jar = CookieJar()
        opener = build_opener(HTTPCookieProcessor(cookie_jar))
    self._opener = opener
    self._proxy_json, self._proxy_http = self._get_proxies()
Example 5: prepare_us_proxy
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def prepare_us_proxy(cookie_handler):
    if addon.getSetting('us_proxy_socks5') == 'true':
        if (addon.getSetting('us_proxy_pass') != '') and (addon.getSetting('us_proxy_user') != ''):
            print 'Using socks5 authenticated proxy: ' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
            socks_handler = SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, addon.getSetting('us_proxy'), int(addon.getSetting('us_proxy_port')), True, addon.getSetting('us_proxy_user'), addon.getSetting('us_proxy_pass'))
            opener = urllib2.build_opener(socks_handler, cookie_handler)
        else:
            print 'Using socks5 proxy: ' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
            socks_handler = SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, addon.getSetting('us_proxy'), int(addon.getSetting('us_proxy_port')), True)
            opener = urllib2.build_opener(socks_handler, cookie_handler)
    elif addon.getSetting('us_proxy_socks5') == 'false':
        us_proxy = 'http://' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
        proxy_handler = urllib2.ProxyHandler({'http': us_proxy})
        if (addon.getSetting('us_proxy_pass') != '') and (addon.getSetting('us_proxy_user') != ''):
            print 'Using authenticated proxy: ' + us_proxy
            password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, us_proxy, addon.getSetting('us_proxy_user'), addon.getSetting('us_proxy_pass'))
            proxy_auth_handler = urllib2.ProxyBasicAuthHandler(password_mgr)
            opener = urllib2.build_opener(proxy_handler, proxy_auth_handler, cookie_handler)
        else:
            print 'Using proxy: ' + us_proxy
            opener = urllib2.build_opener(proxy_handler, cookie_handler)
    return opener
Example 6: serveFile
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def serveFile(self, fURL, sendData, httphandler=None, cookienum=0):
    cj = cookielib.LWPCookieJar(ustvpaths.COOKIE % str(cookienum))
    if httphandler is None:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    else:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj), httphandler)
    request = urllib2.Request(url=fURL)
    sheaders = self.decodeHeaderString(self.headers.headers)
    del sheaders['Host']
    sheaders['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'
    for key in sheaders:
        opener.addheaders = [(key, sheaders[key])]
    if os.path.isfile(ustvpaths.COOKIE % str(cookienum)):
        cj.load(ignore_discard=True)
        cj.add_cookie_header(request)
    response = opener.open(request, timeout=TIMEOUT)
    self.send_response(200)
    headers = response.info()
    for key in headers:
        try:
            self.send_header(key, headers[key])
        except Exception, e:
            print "Exception: ", e
            pass
Example 7: totalPage
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def totalPage(self):
    self.loadedCookies = self.loadCookies()
    if not self.loadedCookies:
        return False
    # page index starts at 0 and ends at max - 1
    req = urllib2.Request('http://dict.youdao.com/wordbook/wordlist?p=0&tags=')
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.loadedCookies))
    urllib2.install_opener(opener)
    response = urllib2.urlopen(req)
    source = response.read()
    if '密码错误' in source:  # the page reports "wrong password", i.e. the login failed
        return False
    else:
        try:
            # the regex captures the page index of the "last page" (最后一页) link
            return int(re.search('<a href="wordlist.p=(.*).tags=" class="next-page">最后一页</a>', source, re.M | re.I).group(1)) - 1
        except Exception:
            return 1
Example 8: read_openload
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def read_openload(url):
    default_headers = dict()
    default_headers["User-Agent"] = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3163.100 Safari/537.36"
    default_headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
    default_headers["Accept-Language"] = "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3"
    default_headers["Accept-Charset"] = "UTF-8"
    default_headers["Accept-Encoding"] = "gzip"
    cj = cookielib.MozillaCookieJar()
    request_headers = default_headers.copy()
    url = urllib.quote(url, safe="%/:=&?~#+!$,;'@()*[]")
    handlers = [urllib2.HTTPHandler(debuglevel=False)]
    handlers.append(NoRedirectHandler())
    handlers.append(urllib2.HTTPCookieProcessor(cj))
    opener = urllib2.build_opener(*handlers)
    req = urllib2.Request(url, None, request_headers)
    handle = opener.open(req, timeout=None)
    return handle.headers.dict.get('location')
Example 9: getUrlc
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def getUrlc(url, data=None, header={}, usecookies=True):
    cj = cookielib.LWPCookieJar()
    if usecookies:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        urllib2.install_opener(opener)
    if not header:
        header = {'User-Agent': UA}
    req = urllib2.Request(url, data, headers=header)
    try:
        response = urllib2.urlopen(req, timeout=15)
        link = response.read()
        response.close()
    except:
        link = ''
    c = ''.join(['%s=%s' % (c.name, c.value) for c in cj]) if cj else ''
    return link, c
Example 10: getUrlrh
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def getUrlrh(url, data=None, header={}, usecookies=True):
    cj = cookielib.LWPCookieJar()
    if usecookies:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        urllib2.install_opener(opener)
    if not header:
        header = {'User-Agent': UA}
    rh = {}
    req = urllib2.Request(url, data, headers=header)
    try:
        response = urllib2.urlopen(req, timeout=15)
        for k in response.headers.keys():
            rh[k] = response.headers[k]
        link = response.read()
        response.close()
    except:
        link = ''
    c = ''.join(['%s=%s' % (c.name, c.value) for c in cj]) if cj else ''
    return link, rh
Example 11: get_access_token
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def get_access_token(self, code, state=None):
    '''
    In callback url: http://host/callback?code=123&state=xyz
    use code and state to get an access token.
    '''
    kw = dict(client_id=self._client_id, client_secret=self._client_secret, code=code)
    if self._redirect_uri:
        kw['redirect_uri'] = self._redirect_uri
    if state:
        kw['state'] = state
    opener = build_opener(HTTPSHandler)
    request = Request('https://github.com/login/oauth/access_token', data=_encode_params(kw))
    request.get_method = _METHOD_MAP['POST']
    request.add_header('Accept', 'application/json')
    try:
        response = opener.open(request, timeout=TIMEOUT)
        r = _parse_json(response.read())
        if 'error' in r:
            raise ApiAuthError(str(r.error))
        return str(r.access_token)
    except HTTPError as e:
        raise ApiAuthError('HTTPError when get access token')
Example 12: __init__
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def __init__(self, user, pwd, softId="110614",
softKey="469c0d8a805a40f39d3c1ec3c9281e9c",
codeType="1004"):
self.softId = softId
self.softKey = softKey
self.user = user
self.pwd = pwd
self.codeType = codeType
self.uid = "100"
self.initUrl = "http://common.taskok.com:9000/Service/ServerConfig.aspx"
self.version = '1.1.1.2'
self.cookieJar = cookielib.CookieJar()
self.opener = urllib2.build_opener(
urllib2.HTTPCookieProcessor(self.cookieJar))
self.loginUrl = None
self.uploadUrl = None
self.codeUrl = None
self.params = []
self.uKey = None
Example 13: urlopen
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            cafile=None, capath=None, cadefault=False, context=None):
    global _opener
    if cafile or capath or cadefault:
        if context is not None:
            raise ValueError(
                "You can't pass both context and any of cafile, capath, and "
                "cadefault"
            )
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif context:
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)
Example 14: send_report
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def send_report(self, payload=None):
    if not payload:
        self.logger.debug('Timer triggered report')
        if self.msg_stack:
            payload = self.msg_stack.pop(-1)
            self.logger.debug('Timer triggered report')
        else:
            self.logger.debug('No more messages to send. Time stopped')
            self.timer.stop()
            return
    handler = urllib2.HTTPHandler()
    opener = urllib2.build_opener(handler)
    data = urllib.urlencode(payload)
    request = urllib2.Request(self.MAIL_URL, data=data)
    request.get_method = lambda: "POST"
    try:
        connection = opener.open(request)
    except urllib2.HTTPError, e:
        connection = e
Example 15: retrieve_status_page
# Required module: import urllib2 [as alias]
# Or: from urllib2 import build_opener [as alias]
def retrieve_status_page(hostname, port, user, password):
    statusPage = "http://%s:%s/manager/status?XML=true" % (hostname, port)
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, statusPage, user, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)
    req = urllib2.Request(statusPage)
    try:
        response = urllib2.urlopen(req)
        return response.read()
    except Exception:
        raise CrawlError("can't access to http://%s:%s",
                         hostname, port)