This article collects typical usage examples of the urllib2.install_opener function in Python. If you are unsure how install_opener is used in practice, or what it is for, the selected code examples below may help.
The following shows 15 code examples of the install_opener function, sorted by popularity by default.
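Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what install_opener does: urllib2.build_opener composes handler objects into an OpenerDirector, and urllib2.install_opener registers that opener as the module-wide default, so every later urllib2.urlopen call goes through it. The URL is a placeholder.

import urllib2
import cookielib

# Build an opener that keeps cookies between requests and install it
# globally, so every subsequent urllib2.urlopen() call uses it.
cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
urllib2.install_opener(opener)

response = urllib2.urlopen("http://example.com/")  # placeholder URL
print response.getcode()

# Passing None discards the installed opener; urlopen() rebuilds a plain
# default opener on its next call (compare Example 3 below).
urllib2.install_opener(None)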
Example 1: findMovieReviewers
def findMovieReviewers(movie_id, subUrl):
    print movie_id
    print subUrl
    reload(sys)
    sys.setdefaultencoding('utf-8')
    cj = cookielib.LWPCookieJar()
    try:
        cj.revert('douban.cookie')
    except:
        try:
            dou = douban()
            username = '[email protected]'
            password = '123654'
            domain = 'http://www.douban.com/'
            origURL = 'http://www.douban.com/login'
            dou.setinfo(username, password, domain, origURL)
            dou.signin()
        except:
            return
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)
    collectPage = urllib2.urlopen("http://movie.douban.com/subject/" + movie_id + "/reviews" + subUrl, timeout=20).read().encode('utf-8')
    soup = BeautifulSoup(collectPage, 'html.parser')
    # init db connection
    conn = MySQLdb.connect(host='localhost', user='root', passwd='root')
    curs = conn.cursor()
    conn.select_db('pydb')
    reviewsOfThisPage = soup.findAll("a", {"class": "review-hd-avatar"})
    countReviews = len(reviewsOfThisPage)
    print countReviews
    for review in reviewsOfThisPage:
        reviewSoup = BeautifulSoup(str(review), 'html.parser')
        userId = reviewSoup.a["href"].split("/")[4]
        try:
            # insert data into db row by row
            curs.execute('INSERT INTO users (user_id) VALUES (%s)', userId)
            print "rows affected " + str(curs.rowcount)
        except:
            print "error inserting, probably duplicate for userid: " + userId
    try:
        foundSubUrl = soup.find("a", {"class": "next"})['href']
    except:
        foundSubUrl = ""
    print foundSubUrl
    conn.commit()
    curs.close()
    conn.close()
    if "" != foundSubUrl and countReviews > 0:
        time.sleep(2)
        findMovieReviewers(movie_id, foundSubUrl)
Example 2: query
def query(searchstr, outformat, allresults=False):
    """Return a list of bibtex items."""
    logging.debug("Query: %s" % searchstr)
    searchstr = "/scholar?q=" + urllib2.quote(searchstr)
    url = GOOGLE_SCHOLAR_URL + searchstr
    header = HEADERS
    header["Cookie"] = header["Cookie"] + ":CF=%d" % outformat
    for proxy_addr in proxy_list:
        try:
            proxy = urllib2.ProxyHandler({"http": proxy_addr})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
            request = urllib2.Request(url, headers=header)
            response = urllib2.urlopen(request, timeout=5)
            print "Success HTTP-Agent:" + proxy_addr
            break
        except urllib2.URLError, e:
            if hasattr(e, "code"):
                print str(e.code) + e.msg + proxy_addr
                if e.code == 403 or e.code == 503:
                    proxy_list.remove(proxy_addr)
            elif e.reason.message == "timed out":
                print "Timed Out" + proxy_addr
                proxy_list.remove(proxy_addr)
            continue
Example 3: save
def save(self):
    # TODO: new IP address should be added in a side-by-side manner
    # or the interface wouldn't appear once IP was changed.
    retval = super(GlobalConfigurationForm, self).save()
    whattoreload = "hostname"
    if self.instance._orig_gc_ipv4gateway != self.cleaned_data.get('gc_ipv4gateway'):
        whattoreload = "networkgeneral"
    if self.instance._orig_gc_ipv6gateway != self.cleaned_data.get('gc_ipv6gateway'):
        whattoreload = "networkgeneral"
    notifier().reload(whattoreload)
    http_proxy = self.cleaned_data.get('gc_httpproxy')
    if http_proxy:
        os.environ['http_proxy'] = http_proxy
        os.environ['https_proxy'] = http_proxy
    elif not http_proxy:
        if 'http_proxy' in os.environ:
            del os.environ['http_proxy']
        if 'https_proxy' in os.environ:
            del os.environ['https_proxy']
    # Reset global opener so ProxyHandler can be recalculated
    urllib2.install_opener(None)
    return retval
Example 4: openurl
def openurl(self, url):
    """
    Open the given URL and return its content.
    """
    cookie_support = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
    self.opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
    urllib2.install_opener(self.opener)
    user_agents = [
        'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11',
        'Opera/9.25 (Windows NT 5.1; U; en)',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
        'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
        'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
        'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
        "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.7 (KHTML, like Gecko) Ubuntu/11.04 Chromium/16.0.912.77 Chrome/16.0.912.77 Safari/535.7",
        "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0 ",
    ]
    agent = random.choice(user_agents)
    self.opener.addheaders = [
        ("User-agent", agent),
        ("Accept", "*/*"),
        ('Referer', 'http://www.google.com'),
    ]
    try:
        res = self.opener.open(url)
        return res.read()
    except:
        return None
Example 5: __init__
def __init__(self, username, realm_id, config, debug=False):
    self._version = QAPI_VERSION
    self._cookiejar = CookieJar()
    self._username = username
    self._realm_id = realm_id
    self._profile = '@'.join((username, realm_id))
    self._realm = REALMS[self._realm_id]
    self._proxy = None
    self._templates = None
    self._debug = debug
    self._config = None  #. User configuration file for scripted mode
    self._connected = False
    self._username = 'nobody'
    self._cFM = None
    try:
        from ConfigFileManager import ConfigFileManager, InternalConfigError
        try:
            self._config = ConfigFileManager(config)
            self._qapi_ini = self._config.option('qapi', 'ini')
            self._cFM = ConfigFileManager(self._qapi_ini)
        except InternalConfigError as e:
            raise Exception("Sorry, %s" % e)
    except ImportError as e:
        raise Exception("Sorry, %s" % e)
    urllib2.install_opener(self._opener())
Example 6: getResponseJSONData
def getResponseJSONData(self, url, secureToken, jsonString, additionalOptions=None):
    "Method sends a JSON encoded string via REST"
    if "proxy" in globals():  # set proxy if necessary
        proxy_handler = urllib2.ProxyHandler(self.config.proxy)
        opener = urllib2.build_opener(proxy_handler)
        urllib2.install_opener(opener)
    req = urllib2.Request(url, jsonString)
    # define header fields
    req.add_header('Authorization', self.config.SDK_AUTH + ",oauth_token=\"" + secureToken + "\"")
    req.add_header('User-Agent', self.config.SDK_VERSION)
    req.add_header('Accept', 'application/json')
    req.add_header('Content-Type', 'application/json')
    #req.add_header('Content-Length', len(json))
    # establish call
    try:
        response = urllib2.urlopen(req)
        response = json.loads(response.read())
        return response
    except urllib2.HTTPError as e:
        # catch status codes other than '0000' and raise a new TelekomException
        # containing 'statusCode' and 'statusMessage'
        raise TelekomException(json.loads(e.read()))
Example 7: _login
def _login(self):
    """
    Authenticates a user in a bugzilla tracker
    """
    if not (self.backend_user and self.backend_password):
        printdbg("No account data provided. Not logged in bugzilla")
        return

    import cookielib

    cookie_j = cookielib.CookieJar()
    cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

    url = self._get_login_url(self.url)
    values = {'Bugzilla_login': self.backend_user,
              'Bugzilla_password': self.backend_password}

    opener = urllib2.build_opener(cookie_h)
    urllib2.install_opener(opener)

    data = urllib.urlencode(values)
    request = urllib2.Request(url, data)
    urllib2.urlopen(request)

    for i, c in enumerate(cookie_j):
        self.cookies[c.name] = c.value

    printout("Logged in bugzilla as %s" % self.backend_user)
    printdbg("Bugzilla session cookies: %s" % self.cookies)
Example 8: _opener
def _opener(self):
    build = [urllib2.HTTPHandler()]
    if self.request.redirect:
        build.append(urllib2.HTTPRedirectHandler())
    if self.request.proxy_host and self.request.proxy_port:
        build.append(urllib2.ProxyHandler(
            {self.request.proxy_protocol: self.request.proxy_host + ':' + str(self.request.proxy_port)}))
        if self.request.proxy_username:
            proxy_auth_handler = urllib2.ProxyBasicAuthHandler()
            proxy_auth_handler.add_password('realm', 'uri', self.request.proxy_username,
                                            self.request.proxy_password)
            build.append(proxy_auth_handler)
    if self.request.cookies:
        self.request.cookies = os.path.join(self._dirname, self.request.cookies)
        self.cookies = cookielib.MozillaCookieJar()
        if os.path.isfile(self.request.cookies):
            self.cookies.load(self.request.cookies)
        build.append(urllib2.HTTPCookieProcessor(self.cookies))
    urllib2.install_opener(urllib2.build_opener(*build))
Example 9: __init__
def __init__(self, login, password, hostname, port=8091):
    self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    self.passman.add_password(None, "http://%s:%d/" % (hostname, int(port)), login, password)
    self.hostname = hostname
    self.port = port
    self.opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(self.passman))
    urllib2.install_opener(self.opener)
Example 10: fetch_data_from_url
def fetch_data_from_url(url):
    """Downloads and returns data from a url"""
    request = urllib2.Request(url)
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    data = opener.open(request).read()
    return data
Example 11: _connect
def _connect(self, request):
    """ Connect to the secured database by opening the request.

    Required:
    urllib2.Request request    The URL Request.

    Return:
    str serialized_response    response data

    """
    # create a password manager
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

    # Add the username and password.
    # If we knew the realm, we could use it instead of None.
    password_mgr.add_password(
            None,
            self.base_url(),
            self._username,
            self._password)

    handler = urllib2.HTTPBasicAuthHandler(password_mgr)

    # create "opener" (OpenerDirector instance)
    opener = urllib2.build_opener(handler)

    # Install the opener.
    # Now all calls to urllib2.urlopen use our opener.
    urllib2.install_opener(opener)

    serialized_response = urllib2.urlopen(request).read()
    return serialized_response
Example 12: login_website
def login_website():
    '''csdn'''
    cook_jar = cookielib.CookieJar()
    cookie_support = urllib2.HTTPCookieProcessor(cook_jar)
    opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    print 'logging'
    login_url = 'http://passport.csdn.net/ajax/accounthandler.ashx?t=log&u=dylinshi&p=123456a&remember=0&f=http%3A%2F%2Fblog.csdn.net%2F&rand=0.363029723724382'
    user_agents = [
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6',
        'Opera/9.25 (Windows NT 5.1; U; en)',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
        'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
        'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
        'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
    ]
    headers = {
        'User-Agent': user_agents[0],
        'Referer': settings.S_start_urls[0],
    }
    req = urllib2.Request(url=login_url, headers=headers)
    res = urllib2.urlopen(req)
    print 'code is :' + str(res.code)
    if res.code <= 200:
        print 'login %s success' % settings.S_target_website
    else:
        print 'login %s fail' % settings.S_target_website
    print cook_jar._cookies
    return res
Example 13: get_current_sequence
def get_current_sequence(self):
    """get the current sequence from the playlist"""
    url = self.get_sequence_url()
    header = self.get_header()
    req = urllib2.Request(url, None, header)
    opener = urllib2.build_opener()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.get_cookie()))
    try:
        opener.add_handler(self.get_proxy())
    except:
        log.warning('can not add proxy')
    urllib2.install_opener(opener)
    try:
        response = urllib2.urlopen(req, timeout=10)
        stream = response.read()
    except:
        return 0
    try:
        for line in stream.split('\n'):
            if line.startswith('#EXT-X-MEDIA-SEQUENCE'):
                return line.split(':')[1]
    except:
        return 0
Example 14: run
def run(self):
    global proxyLists
    global proxyCheckedLists
    while proxyLists:
        proxyLock.acquire()           # acquire the lock
        proxyList = proxyLists.pop()  # pop one proxy entry
        proxyLock.release()
        cookie = urllib2.HTTPCookieProcessor()  # enable cookie handling
        proxyHandle = urllib2.ProxyHandler({"http": r"http://%s:%s" % (proxyList[0], proxyList[1])})
        opener = urllib2.build_opener(cookie, proxyHandle)
        opener.addheaders = [("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.102 Safari/537.36")]
        urllib2.install_opener(opener)
        t1 = time.time()
        try:
            req = urllib2.urlopen(self.test_url, timeout=self.timeout)
            result = req.read()
            pos = result.find(self.test_str)
            timeused = time.time() - t1
            proxyList.append(timeused)
            if pos > 1:
                proxyLock.acquire()
                proxyCheckedLists.append(proxyList)
                proxyLock.release()
        except Exception, e:
            continue
Example 15: fx_opener
def fx_opener(request):
    # Restore the previously installed opener when the test finishes.
    request.addfinalizer(
        functools.partial(setattr, urllib2, '_opener', urllib2._opener)
    )
    opener = urllib2.build_opener(TestHTTPHandler)
    urllib2.install_opener(opener)
    return opener