This article collects typical usage examples of the Python urllib2.build_opener function, answering questions such as: what exactly does build_opener do, how is it called, and what does working code look like? The curated examples below should help.
The following 15 code examples of build_opener are ordered by popularity by default. All of them target Python 2, where urllib2 lives; in Python 3 the equivalent function is urllib.request.build_opener.
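Before the examples, a quick orientation: build_opener() chains handler objects (cookies, proxies, authentication, debugging) into a single OpenerDirector whose open() method behaves like urllib2.urlopen(), and install_opener() makes such an opener the process-wide default. A minimal sketch (the proxy address is a placeholder):

import urllib2
import cookielib

# Compose cookie and proxy handling into a single opener
cookie_handler = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
proxy_handler = urllib2.ProxyHandler({'http': 'http://127.0.0.1:8080'})  # placeholder proxy
opener = urllib2.build_opener(cookie_handler, proxy_handler)
response = opener.open('http://www.example.com/')
print response.read()

# Optionally make this opener the default for later urllib2.urlopen() calls
urllib2.install_opener(opener)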
Example 1: send
def send(self, uri, data=''):
    url = self.base_url + str(uri)
    req = urllib2.Request(url)
    # Enable cookie support, reusing the jar across calls
    if self.cookie == '':
        self.cookie = cookielib.CookieJar()
    cookie_handler = urllib2.HTTPCookieProcessor(self.cookie)
    if self.debug:
        http_handler = urllib2.HTTPHandler(debuglevel=1)
        opener = urllib2.build_opener(cookie_handler, http_handler)
    else:
        opener = urllib2.build_opener(cookie_handler)
    req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0')
    req.add_header('Content-Type', 'application/x-www-form-urlencoded')
    req.add_header('Cache-Control', 'no-cache')
    req.add_header('Accept', '*/*')
    req.add_header('Connection', 'close')
    # URL-encode and attach the POST body, if any
    if data:
        post_data = urllib.urlencode(data)
        req.add_data(post_data)
        req.add_header('Content-Length', str(len(post_data)))
    try:
        response = opener.open(req)
    except urllib2.URLError:
        raise FetionError(400)
    return response
Example 2: getResponseMixedData
def getResponseMixedData(self, url, secureToken, dic, additionalOptions=None):
    """Sets up a REST call with mixed body data such as multipart/form-data."""
    # Chain the proxy handler (if one is configured) with the multipart
    # handler in a single opener, so neither is lost
    handlers = [MultipartPostHandler.MultipartPostHandler]
    if "proxy" in globals():
        handlers.insert(0, urllib2.ProxyHandler(self.config.proxy))
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
    req = urllib2.Request(url, dic.parameters())
    req.add_header('Authorization', self.config.SDK_AUTH + ",oauth_token=\"" + secureToken + "\"")
    req.add_header('User-Agent', self.config.SDK_VERSION)
    req.add_header('Accept', 'application/json')
    # Set any additional header fields
    if additionalOptions is not None:
        for key in additionalOptions:
            req.add_header(key, additionalOptions[key])
    try:
        response = urllib2.urlopen(req)
        return json.loads(response.read())
    except urllib2.HTTPError as e:
        raise TelekomException(json.loads(e.read()))
Example 3: __init__
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None, repository="default",
             ignored_prefixes=None, ignored_suffixes=None,
             timeout=20, blob_timeout=None, cookie_jar=None,
             upload_tmp_dir=None):
    self.timeout = timeout
    self.blob_timeout = blob_timeout
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    # TODO: actually use the repository info in the requests
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)
    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(cookiejar=cookie_jar)
    # Get the proxy handler for this server URL
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)
    # Build the URL openers: a plain one and a streaming one
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())
    # Set the proxy flag if any handler actually carries proxies
    self.is_proxy = False
    for handler in self.opener.handlers:
        if isinstance(handler, ProxyHandler) and handler.proxies:
            self.is_proxy = True
    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'
    self.fetch_api()
Example 4: get_html
def get_html(self):
    # Add cookie support
    cookie = cookielib.CookieJar()
    cookie_handler = urllib2.HTTPCookieProcessor(cookie)
    if self.agents:
        agent = choice(self.agents)
    else:
        agent = None
    # Route traffic through the chosen proxy agent, if any
    if agent:
        proxy_handler = urllib2.ProxyHandler({'http': agent})
        # proxy_handler = urllib2.ProxyHandler({'https': agent})
        opener = urllib2.build_opener(cookie_handler, proxy_handler)
    else:
        opener = urllib2.build_opener(cookie_handler)
    urllib2.install_opener(opener)
    datas = []
    for url in self.get_urls:
        req = urllib2.Request(url, headers=self.header)
        html = urllib2.urlopen(req, timeout=30).read()
        # Re-encode Chinese pages to UTF-8 using the detected encoding
        code = chardet.detect(html)['encoding']
        if code in self.zh_code:
            html = html.decode(code, 'ignore').encode('utf-8')
        datas.append(html)
    return datas
Example 5: serveFile
def serveFile(self, fURL, sendData, httphandler=None):
    cj = cookielib.LWPCookieJar(ustvpaths.COOKIE)
    if httphandler is None:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    else:
        opener = urllib2.build_opener(httphandler, urllib2.HTTPCookieProcessor(cj))
    request = urllib2.Request(url=fURL)
    # Copy the client's request headers, dropping Host and spoofing User-Agent
    opener.addheaders = []
    sheaders = self.decodeHeaderString(''.join(self.headers.headers))
    for key in sheaders:
        if key == 'User-Agent':
            opener.addheaders.append(('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'))
        elif key != 'Host':
            opener.addheaders.append((key, sheaders[key]))
    if os.path.isfile(ustvpaths.COOKIE):
        cj.load(ignore_discard=True)
        cj.add_cookie_header(request)
    response = opener.open(request, timeout=TIMEOUT)
    self.send_response(200)
    # Relay the upstream response headers back to the client
    headers = response.info()
    for key in headers.keys():
        try:
            self.send_header(key, headers[key])
        except Exception as e:
            print e
Example 6: __init__
def __init__(self, base, params, user=None, password=None):
    self.base = base
    # Make sure the base URL ends with a query separator
    if self.base[-1] not in "?&":
        if "?" in self.base:
            self.base += "&"
        else:
            self.base += "?"
    self.params = {}
    if user is not None and password is not None:
        # Attach HTTP basic-auth credentials to the opener
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, base, user, password)
        auth = urllib2.HTTPBasicAuthHandler(password_mgr)
        self.client = urllib2.build_opener(auth)
    else:
        self.client = urllib2.build_opener()
    for key, val in self.defaultParams.items():
        if self.base.lower().rfind("%s=" % key.lower()) == -1:
            self.params[key] = val
    for key in self.fields:
        if key in params:
            self.params[key] = params[key]
        elif self.base.lower().rfind("%s=" % key.lower()) == -1:
            self.params[key] = ""
Example 7: getResult
def getResult(self, ip, cookieHandle, fileID):
    try:
        user_agent = random.choice(self.user_agents)
        # Combine the proxy and cookie handlers in a single opener,
        # so the proxy and custom headers are not discarded
        proxy = urllib2.ProxyHandler({'http': ip})
        opener = urllib2.build_opener(proxy, cookieHandle)
        opener.addheaders = [
            ('User-Agent', user_agent),
            ('Referer', 'http://www.sufile.com/down/' + fileID + '.html'),
            ('Host', 'www.sufile.com'),
            ('DNT', '1'),
        ]
        r = opener.open('http://www.sufile.com/dd.php?file_key=' + fileID + '&p=0', timeout=10)
        d = r.read()
        with open('./result.html', 'wb') as f:
            f.write(d)
        # Extract the download link from the page
        p = re.compile('<a id="downs" href="(.*?)"', re.S)
        m = re.search(p, d)
        if m:
            print m.group(1).strip()
    except urllib2.HTTPError as e:
        print 'HTTPError: ' + str(e.code)
        return False
Example 8: refresh_feed
def refresh_feed(self, rssurl):
    """
    Parses the content of an RSS feed, using a proxy if configured;
    uses a cache for the feed content if memcached is in use.
    :param str rssurl: URL of the RSS feed
    :returns: List of RSS entries
    """
    headers = []
    opener = urllib2.build_opener()
    proxy = self.http_proxy
    # If a proxy is set, rebuild the opener with custom handlers
    if proxy:
        urlinfo = urlparse(proxy)
        proxyhandler = urllib2.ProxyHandler({urlinfo.scheme: proxy})
        opener = urllib2.build_opener(proxyhandler, urllib2.HTTPHandler, urllib2.HTTPSHandler)
    # TODO: Use feedparser
    xml = minidom.parse(opener.open(rssurl))
    if xml:
        root = xml.documentElement
        # RSS items may sit directly under the root or inside a <channel>
        for node in root.childNodes:
            if node.nodeName == "item":
                headers.append(self.get_header(node))
            if node.nodeName == "channel":
                for channel_child in node.childNodes:
                    if channel_child.nodeName == "item":
                        headers.append(self.get_header(channel_child))
    return headers
Example 9: send_web_socket
def send_web_socket(Cookie_Jar, url_to_call):
    try:
        import urllib2
        import base64
        import uuid
        req = urllib2.Request(url_to_call)
        # Random base64-encoded GUID for the WebSocket handshake key
        str_guid = str(uuid.uuid1()).upper()
        str_guid = base64.b64encode(str_guid)
        req.add_header('Connection', 'Upgrade')
        req.add_header('Upgrade', 'websocket')
        req.add_header('Sec-WebSocket-Key', str_guid)
        req.add_header('Origin', 'http://www.streamafrik.com')
        req.add_header('Pragma', 'no-cache')
        req.add_header('Cache-Control', 'no-cache')
        req.add_header('Sec-WebSocket-Version', '13')
        req.add_header('Sec-WebSocket-Extensions', 'permessage-deflate; client_max_window_bits, x-webkit-deflate-frame')
        req.add_header('User-Agent', 'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53')
        # Build a single opener with cookie, auth and keep-alive support
        from keepalive import HTTPHandler
        cookie_handler = urllib2.HTTPCookieProcessor(Cookie_Jar)
        opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), HTTPHandler())
        urllib2.install_opener(opener)
        response = urllib2.urlopen(req)
        response.close()
        return ''
    except:
        traceback.print_exc(file=sys.stdout)
        return ''
Example 10: getOpener
def getOpener(self):
    # Return an opener with cookie support, routed through the proxy if one is set
    cj = cookielib.CookieJar()
    if self.__proxy is not None:
        return urllib2.build_opener(urllib2.ProxyHandler({"http": self.__proxy}),
                                    urllib2.HTTPCookieProcessor(cj))
    else:
        return urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
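A hypothetical caller (the class name and URLs below are placeholders) would reuse the returned opener so the cookie jar persists across requests:

# hypothetical usage of the getOpener() method above
client = SomeClient()                                    # any class exposing getOpener()
opener = client.getOpener()
opener.open('http://www.example.com/login')              # response cookies land in the jar
page = opener.open('http://www.example.com/me').read()   # the same cookies are sent back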
Example 11: doLogin
def doLogin(adminHash):
    sys.stdout.write("(+) Logging into CMS.. ")
    sys.stdout.flush()
    adminIndex = "http://" + options.target + options.dirPath + "openedit/authentication/logon.html"
    values = {'loginokpage': '', 'accountname': 'admin', 'password': adminHash, 'submit': 'Login'}
    data = urllib.urlencode(values)
    cj = CookieJar()
    if options.proxy:
        try:
            # Route the login request through the configured proxy
            opener = urllib2.build_opener(getProxy(), urllib2.HTTPCookieProcessor(cj))
            opener.addheaders = [('User-agent', agent)]
            check = opener.open(adminIndex, data).read()
        except:
            print "\n(-) Proxy connection failed to remote target"
            sys.exit(1)
    else:
        try:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            check = opener.open(adminIndex, data).read()
        except:
            print "(-) Target connection failed, check your address"
            sys.exit(1)
    if not re.search("Please enter your password", check):
        sys.stdout.write("logged in successfully\n")
        sys.stdout.flush()
        return cj
    else:
        sys.stdout.write("Login Failed! Exiting..\n")
        sys.stdout.flush()
        sys.exit(1)
Example 12: continuity
def continuity(url):
    import hashlib
    fmt = '%25s: %s'
    # First fetch the file with the normal HTTP handler
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = hashlib.md5(foo)
    print fmt % ('normal urllib', m.hexdigest())
    # Now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    fo = urllib2.urlopen(url)
    foo = fo.read()
    fo.close()
    m = hashlib.md5(foo)
    print fmt % ('keepalive read', m.hexdigest())
    # Read line by line over the same keep-alive connection
    fo = urllib2.urlopen(url)
    foo = ''
    while True:
        f = fo.readline()
        if not f:
            break
        foo = foo + f
    fo.close()
    m = hashlib.md5(foo)
    print fmt % ('keepalive readline', m.hexdigest())
Example 13: call_service
def call_service(self):
    """Call the remote service."""
    try:
        encode_data = None
        if self.params is not None:
            if self.method == 'GET':
                self.url += '?' + urlencode(self.params)
                log_debug(self.url)
            elif self.method == 'POST':
                encode_data = urlencode(self.params)
        # Build one opener, with cookie support if a jar was supplied,
        # and attach the custom headers to whichever opener is used
        if self.cookie_jar is not None:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie_jar))
        else:
            opener = urllib2.build_opener()
        opener.addheaders = self.headers
        res_obj = opener.open(self.url, data=encode_data, timeout=self.timeout)
        self.set_cookie = res_obj.info().getheader('Set-Cookie')
        self.res = res_obj.read()
        # Decode the response before parsing JSON
        self.encoding = guess_json_utf(self.res)
        if self.encoding:
            self.res = self.res.decode(self.encoding)
        self.json = json.loads(self.res)
        self.ret = self.json.get('ret')
        self.msg = self.json.get('msg')
        self.data = self.json.get('data')
        return True
    except Exception as e:
        # log_error('[JSONService] url:%s, response:%s, exception:%s' % (self.url, self.res, e))
        return False
Example 14: loadUrl
def loadUrl(url, profiler, enable_proxy=False):
    loadtime = 0
    opener = None
    try:
        begin = time.time()
        req = urllib2.Request(url)
        req.add_header("User-Agent", "Mozilla/5.0 (Linux; Android 5.1.1; Nexus 5 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36")
        req.add_header("Accept-Encoding", "gzip,deflate,sdch")
        req.add_header("Accept", "*/*")
        req.add_header("Cache-Control", "no-cache")
        if enable_proxy:
            print "USE Turbo Proxy!!!"
            # turbo_local_proxy is a module-level proxy address
            proxy_handler = urllib2.ProxyHandler({"http": turbo_local_proxy})
            opener = urllib2.build_opener(proxy_handler)
        else:
            opener = urllib2.build_opener()
        resp = opener.open(req, timeout=1000000)
        cntype = resp.headers.getheader("content-type")
        print "content-type", cntype
        print "status code", resp.getcode()
        # print "headers", resp.headers
        size = len(resp.read())
        loadtime = time.time() - begin
        print "page size", size
        print "loadtime is ", loadtime
        profiler.addSize(size)
        profiler.addRescources(url, resp.getcode(), cntype, int(loadtime * 1000))
        return loadtime
    except ValueError:
        pass
    finally:
        # The opener may never have been built if Request() raised
        if opener is not None:
            opener.close()
Example 15: get_urllib_object
def get_urllib_object(uri, timeout, headers=None, verify_ssl=True, data=None):
    """Return a urllib2 response object for `uri` with `timeout` and `headers`.

    This is better than using urllib2 directly: it handles SSL verification,
    makes sure the URI is UTF-8, and is shorter and easier to use. Modules may
    use this if they need a urllib2 object to call .read() on. For more
    information, refer to the urllib2 documentation.
    """
    uri = quote_query(uri)
    original_headers = {'Accept': '*/*', 'User-Agent': 'Mozilla/5.0 (Willie)'}
    # Merge caller-supplied headers over the defaults
    if headers is not None:
        original_headers.update(headers)
    headers = original_headers
    if verify_ssl:
        opener = urllib2.build_opener(VerifiedHTTPSHandler)
    else:
        opener = urllib2.build_opener()
    req = urllib2.Request(uri, headers=headers, data=data)
    try:
        u = opener.open(req, None, timeout)
    except urllib2.HTTPError as e:
        # Even on an HTTP error (say 404), return the page contents
        return e.fp
    return u
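A hedged usage sketch (the URL and header values are placeholders): because the function returns the response object on success and the error's file object on HTTP errors, the caller can .read() the result either way:

page = get_urllib_object('http://www.example.com/api', 10,
                         headers={'Accept': 'application/json'})
print page.read()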