This article collects typical usage examples of Python's urllib.request.getproxies method. If you are unsure what request.getproxies does or how to use it, the curated code examples below may help; you can also explore the other functions of the urllib.request module.
Below are 13 code examples of request.getproxies, sorted by popularity by default.
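A quick orientation before the examples: getproxies() returns a dictionary mapping protocol names to proxy URLs, read from environment variables such as HTTP_PROXY and HTTPS_PROXY (and, on macOS and Windows, from system settings). A minimal sketch, with a hypothetical proxy address:

import os
from urllib.request import getproxies

os.environ["HTTP_PROXY"] = "http://127.0.0.1:8080"  # hypothetical proxy
print(getproxies())  # e.g. {'http': 'http://127.0.0.1:8080'}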
Example 1: _make_pool
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def _make_pool(
    self,
    parsed_dsn,  # type: Dsn
    http_proxy,  # type: Optional[str]
    https_proxy,  # type: Optional[str]
    ca_certs,  # type: Optional[Any]
):
    # type: (...) -> Union[PoolManager, ProxyManager]
    proxy = None

    # try HTTPS first
    if parsed_dsn.scheme == "https" and (https_proxy != ""):
        proxy = https_proxy or getproxies().get("https")

    # maybe fall back to HTTP proxy
    if not proxy and (http_proxy != ""):
        proxy = http_proxy or getproxies().get("http")

    opts = self._get_pool_options(ca_certs)

    if proxy:
        return urllib3.ProxyManager(proxy, **opts)
    else:
        return urllib3.PoolManager(**opts)
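A note on the precedence Example 1 implements: an explicit proxy argument wins over the environment, and passing an empty string disables the environment fallback entirely. The same pattern in isolation (values are hypothetical):

from urllib.request import getproxies

def pick_proxy(explicit, scheme):
    # explicit="" means "never proxy"; explicit=None means
    # "fall back to the system configuration from getproxies()".
    if explicit != "":
        return explicit or getproxies().get(scheme)
    return None

print(pick_proxy(None, "https"))                    # env HTTPS proxy, if any
print(pick_proxy("http://10.0.0.1:8080", "https"))  # the explicit proxy
print(pick_proxy("", "https"))                      # None: proxying disabled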
Example 2: proxies_from_env
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items()
                  if k in ('http', 'https')}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
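A hypothetical invocation of Example 2, assuming the aiohttp version shown here (the import path and exact behaviour may differ across releases):

import os
os.environ["HTTP_PROXY"] = "http://user:pass@10.0.0.1:3128"  # hypothetical
from aiohttp.helpers import proxies_from_env

proxy, auth = proxies_from_env()["http"]
print(proxy)  # http://10.0.0.1:3128 -- credentials stripped from the URL
print(auth)   # BasicAuth carrying login='user' and password='pass'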
Example 3: get_proxy_url
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def get_proxy_url(url):
    proxies = getproxies()
    url_parts = parse_url_config(url)
    proxy_keys = [
        url_parts['scheme'] + '://' + url_parts['netloc'],
        url_parts['scheme'],
        'all://' + url_parts['netloc'],
        'all'
    ]
    for key in proxy_keys:
        if key in proxies:
            return proxies[key]
    return None
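A hypothetical call to Example 3; parse_url_config is not shown above, but it evidently splits a URL into at least 'scheme' and 'netloc' parts. getproxies() exposes the ALL_PROXY environment variable under the 'all' key, the final fallback in proxy_keys (assuming no more specific proxy variables are set):

import os
os.environ["ALL_PROXY"] = "socks5://127.0.0.1:1080"  # hypothetical proxy

print(get_proxy_url("http://example.com/data"))  # -> socks5://127.0.0.1:1080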
Example 4: LyricWikia
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def LyricWikia(artist, title):
    proxy = request.getproxies()
    url = 'http://lyrics.wikia.com/api.php?action=lyrics&artist={artist}&song={title}&fmt=json&func=getSong'.format(
        artist=artist, title=title).replace(" ", "%20")
    r = requests.get(url, timeout=15, proxies=proxy)
    # We got some badly formatted JSON data... so we need to fix stuff :/
    returned = r.text
    returned = returned.replace("\'", "\"")
    returned = returned.replace("song = ", "")
    returned = json.loads(returned)
    if returned["lyrics"] != "Not found":
        # set the url to the one we just received, and retrieve it
        timed = True
        url = returned["url"] + "/lrc"
        r = requests.get(url, timeout=15, proxies=proxy)
        if r.status_code == 404:
            timed = False
            url = returned["url"]
            r = requests.get(url, timeout=15, proxies=proxy)
        soup = BeautifulSoup(r.text, 'html.parser')
        soup = soup.find("div", {"class": "lyricbox"})
        [elem.extract() for elem in soup.findAll('div')]
        [elem.replaceWith('\n') for elem in soup.findAll('br')]
        # with old BeautifulSoup the following is needed..? For recent versions, this isn't needed/doesn't work
        try:
            # soup = BeautifulSoup(str(soup), convertEntities=BeautifulSoup.HTML_ENTITIES)
            soup = BeautifulSoup(str(soup), 'html.parser')
        except Exception:
            pass
        soup = BeautifulSoup(re.sub(r'(<!--[.\s\S]*-->)', '', str(soup)), 'html.parser')
        [elem.extract() for elem in soup.findAll('script')]
        return soup.getText(), url, timed
    else:
        return "error", "", False
Example 5: check_version
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def check_version() -> bool:
    proxy = request.getproxies()
    try:
        return get_version() >= \
            float(requests.get("https://api.github.com/repos/SimonIT/spotifylyrics/tags", timeout=5, proxies=proxy)
                  .json()[0]["name"])
    except Exception:
        return True
Example 6: __init__
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def __init__(
    self,
    username: str,
    password: str,
    challenge_type: Optional[str] = "email",
    headers: Optional[CaseInsensitiveDictType] = None,
    proxies: Optional[Proxies] = None,
    **kwargs: Any,
) -> None:
    self.session: requests.Session = requests.session()
    self.session.headers = HEADERS if headers is None else headers
    self.session.proxies = getproxies() if proxies is None else proxies
    self.session.verify = certifi.where()
    self.expires_at = datetime.strptime("1970", "%Y").replace(
        tzinfo=pytz.UTC
    )  # some time in the past
    self.username: str = username
    self.password: str = password
    if challenge_type not in ["email", "sms"]:
        raise ValueError("challenge_type must be email or sms")
    self.challenge_type: str = challenge_type
    self.device_token: str = kwargs.pop("device_token", str(uuid.uuid4()))
    self.oauth: OAuth = kwargs.pop("ouath", OAuth())
    super().__init__(**kwargs)
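The proxy-related pattern in this constructor is seeding a requests.Session with the system configuration whenever the caller supplies none. In isolation:

import requests
from urllib.request import getproxies

session = requests.session()
session.proxies = getproxies()  # inherit system/environment proxy settings

Note that requests already honours proxy environment variables per request when session.trust_env is true; assigning getproxies() up front pins the settings at construction time and makes them easy to override later.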
Example 7: _get_proxies
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def _get_proxies(self):
    proxies = getproxies()
    proxy = {}
    if self.proxy:
        parsed_proxy = urlparse(self.proxy)
        proxy[parsed_proxy.scheme] = parsed_proxy.geturl()
        proxies.update(proxy)
    return proxies
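Because _get_proxies only touches self.proxy, it can be exercised outside its class (not shown above) with a stand-in object; the explicit proxy overrides the environment entry for its scheme. A hypothetical harness, assuming the function sits at module level next to these imports:

from types import SimpleNamespace
from urllib.parse import urlparse
from urllib.request import getproxies

dummy = SimpleNamespace(proxy="http://10.0.0.1:8080")  # hypothetical proxy
print(_get_proxies(dummy))  # environment proxies, 'http' entry overridden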
Example 8: make_soup
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def make_soup(url):  # pragma: no cover
    """Make soup, that is basically parsing the html document."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())
    html = response.content
    return BeautifulSoup(html, "html.parser")
Example 9: make_json
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def make_json(url):  # pragma: no cover
    """Make a dictionary out of a json file."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())
    json_file = response.text
    data = json.loads(json_file)
    return data
Example 10: fetch_ktools_tar
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def fetch_ktools_tar(self, location, url, attempts=3, timeout=15, cooldown=1):
    last_error = None
    proxy_config = urlrequest.getproxies()
    self.announce('Retrieving ktools from: {}'.format(url), INFO)
    self.announce('Proxy configuration: {}'.format(proxy_config), INFO)

    if proxy_config:
        # Handle proxy config
        proxy_handler = urlrequest.ProxyHandler(proxy_config)
        opener = urlrequest.build_opener(proxy_handler)
        urlrequest.install_opener(opener)

    for i in range(attempts):
        try:
            if proxy_config:
                # Proxied connection
                req = urlrequest.urlopen(urlrequest.Request(url), timeout=timeout)
                break
            else:
                # Non-proxied connection
                req = urlrequest.urlopen(url, timeout=timeout)
                break
        except URLError as e:
            self.announce('Fetch ktools tar failed: {} (attempt {})'.format(e, (i + 1)), WARN)
            last_error = e
            sleep(cooldown)
    else:
        self.announce('Failed to get ktools tar after {} attempts'.format(attempts), ERROR)
        if last_error:
            raise last_error

    with open(location, 'wb') as f:
        f.write(req.read())
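The core of the proxy handling above is the ProxyHandler / build_opener / install_opener trio. urlopen() already consults getproxies() through its default opener, so installing the handler explicitly is mostly about making the configuration inspectable and loggable. The pattern in isolation:

from urllib import request as urlrequest

proxy_config = urlrequest.getproxies()
if proxy_config:
    # Route all subsequent urlopen() calls through the detected proxies.
    opener = urlrequest.build_opener(urlrequest.ProxyHandler(proxy_config))
    urlrequest.install_opener(opener)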
Example 11: get_soap_client
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def get_soap_client(wsdlurl, timeout=30):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached. The
    timeout is in seconds."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if (wsdlurl, timeout) not in _soap_clients:
        # try zeep first
        try:
            from zeep.transports import Transport
            transport = Transport(timeout=timeout)
            from zeep import CachingClient
            client = CachingClient(wsdlurl, transport=transport).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl, transport=transport).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(
                        wsdlurl, proxy=getproxies(), timeout=timeout).service
                except ImportError:
                    # use pysimplesoap as last resort
                    try:
                        from pysimplesoap.client import SoapClient
                        client = SoapClient(
                            wsdl=wsdlurl, proxy=getproxies(), timeout=timeout)
                    except ImportError:
                        raise ImportError(
                            'No SOAP library (such as zeep) found')
        _soap_clients[(wsdlurl, timeout)] = client
    return _soap_clients[(wsdlurl, timeout)]
Example 12: update_proxy
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
    if proxy_from_env and not proxy:
        proxy_url = getproxies().get(self.original_url.scheme)
        proxy = URL(proxy_url) if proxy_url else None
    if proxy and not proxy.scheme == 'http':
        raise ValueError("Only http proxies are supported")
    if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
        raise ValueError("proxy_auth must be None or BasicAuth() tuple")
    self.proxy = proxy
    self.proxy_auth = proxy_auth
Example 13: get_soap_client
# Required import: from urllib import request [as alias]
# Alternatively: from urllib.request import getproxies [as alias]
def get_soap_client(wsdlurl):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if wsdlurl not in _soap_clients:
        # try zeep first
        try:
            from zeep import CachingClient
            client = CachingClient(wsdlurl).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(wsdlurl, proxy=getproxies()).service
                except ImportError:
                    # use pysimplesoap as last resort
                    from pysimplesoap.client import SoapClient
                    client = SoapClient(wsdl=wsdlurl, proxy=getproxies())
        _soap_clients[wsdlurl] = client
    return _soap_clients[wsdlurl]