

Python request.getproxies Method Code Examples

This article collects typical usage examples of the Python urllib.request.getproxies method. If you are wondering how request.getproxies is used in practice, the selected code examples below may help. You can also explore further usage examples for the urllib.request module that contains this method.


The following presents 13 code examples of the request.getproxies method, sorted by popularity by default.
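
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of what urllib.request.getproxies returns and how the result is commonly handed to an HTTP client; the proxy URL in the comment is a hypothetical placeholder.

from urllib.request import getproxies

# getproxies() reads proxy settings from environment variables such as
# HTTP_PROXY / HTTPS_PROXY / NO_PROXY (and, on some platforms, from system
# configuration) and returns a dict mapping lowercase scheme names to proxy
# URLs, e.g. {'http': 'http://proxy.example.com:8080'}.
proxies = getproxies()

if proxies:
    print("System proxy configuration:", proxies)
else:
    print("No proxy configured; connections are made directly.")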

Example 1: _make_pool

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def _make_pool(
        self,
        parsed_dsn,  # type: Dsn
        http_proxy,  # type: Optional[str]
        https_proxy,  # type: Optional[str]
        ca_certs,  # type: Optional[Any]
    ):
        # type: (...) -> Union[PoolManager, ProxyManager]
        proxy = None

        # try HTTPS first
        if parsed_dsn.scheme == "https" and (https_proxy != ""):
            proxy = https_proxy or getproxies().get("https")

        # maybe fallback to HTTP proxy
        if not proxy and (http_proxy != ""):
            proxy = http_proxy or getproxies().get("http")

        opts = self._get_pool_options(ca_certs)

        if proxy:
            return urllib3.ProxyManager(proxy, **opts)
        else:
            return urllib3.PoolManager(**opts) 
Developer: getsentry, Project: sentry-python, Lines: 26, Source: transport.py

Example 2: proxies_from_env

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items()
                  if k in ('http', 'https')}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret 
Developer: TouwaStar, Project: Galaxy_Plugin_Bethesda, Lines: 27, Source: helpers.py

Example 3: get_proxy_url

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def get_proxy_url(url):
    proxies = getproxies()
    url_parts = parse_url_config(url)

    proxy_keys = [
        url_parts['scheme'] + '://' + url_parts['netloc'],
        url_parts['scheme'],
        'all://' + url_parts['netloc'],
        'all'
    ]

    for key in proxy_keys:
        if key in proxies:
            return proxies[key]

    return None 
Developer: cloud-custodian, Project: cloud-custodian, Lines: 18, Source: utils.py

Example 4: LyricWikia

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def LyricWikia(artist, title):
    proxy = request.getproxies()
    url = 'http://lyrics.wikia.com/api.php?action=lyrics&artist={artist}&song={title}&fmt=json&func=getSong'.format(
        artist=artist, title=title).replace(" ", "%20")
    r = requests.get(url, timeout=15, proxies=proxy)
    # We got some badly formatted JSON data... so we need to fix it up :/
    returned = r.text
    returned = returned.replace("\'", "\"")
    returned = returned.replace("song = ", "")
    returned = json.loads(returned)
    if returned["lyrics"] != "Not found":
        # set the url to the url we just received, and retrieving it
        timed = True
        url = returned["url"] + "/lrc"
        r = requests.get(url, timeout=15, proxies=proxy)
        if r.status_code == 404:
            timed = False
            url = returned["url"]
            r = requests.get(url, timeout=15, proxies=proxy)
        soup = BeautifulSoup(r.text, 'html.parser')
        soup = soup.find("div", {"class": "lyricbox"})
        [elem.extract() for elem in soup.findAll('div')]
        [elem.replaceWith('\n') for elem in soup.findAll('br')]
        # with old BeautifulSoup versions the following was needed; for recent versions it isn't needed / doesn't work
        try:
            # soup = BeautifulSoup(str(soup), convertEntities=BeautifulSoup.HTML_ENTITIES)
            soup = BeautifulSoup(str(soup), 'html.parser')
        except:
            pass
        soup = BeautifulSoup(re.sub(r'(<!--[.\s\S]*-->)', '', str(soup)), 'html.parser')
        [elem.extract() for elem in soup.findAll('script')]
        return soup.getText(), url, timed
    else:
        return "error", "", False 
Developer: SimonIT, Project: spotifylyrics, Lines: 36, Source: lyrics.py

Example 5: check_version

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def check_version() -> bool:
    proxy = request.getproxies()
    try:
        return get_version() >= \
               float(requests.get("https://api.github.com/repos/SimonIT/spotifylyrics/tags", timeout=5, proxies=proxy)
                     .json()[0]["name"])
    except Exception:
        return True 
Developer: SimonIT, Project: spotifylyrics, Lines: 10, Source: backend.py

Example 6: __init__

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def __init__(
        self,
        username: str,
        password: str,
        challenge_type: Optional[str] = "email",
        headers: Optional[CaseInsensitiveDictType] = None,
        proxies: Optional[Proxies] = None,
        **kwargs: Any,
    ) -> None:
        self.session: requests.Session = requests.session()
        self.session.headers = HEADERS if headers is None else headers
        self.session.proxies = getproxies() if proxies is None else proxies
        self.session.verify = certifi.where()
        self.expires_at = datetime.strptime("1970", "%Y").replace(
            tzinfo=pytz.UTC
        )  # some time in the past

        self.username: str = username
        self.password: str = password
        if challenge_type not in ["email", "sms"]:
            raise ValueError("challenge_type must be email or sms")
        self.challenge_type: str = challenge_type

        self.device_token: str = kwargs.pop("device_token", str(uuid.uuid4()))
        self.oauth: OAuth = kwargs.pop("ouath", OAuth())

        super().__init__(**kwargs) 
Developer: robinhood-unofficial, Project: pyrh, Lines: 29, Source: sessionmanager.py

Example 7: _get_proxies

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def _get_proxies(self):
        proxies = getproxies()

        proxy = {}
        if self.proxy:
            parsed_proxy = urlparse(self.proxy)
            proxy[parsed_proxy.scheme] = parsed_proxy.geturl()

        proxies.update(proxy)
        return proxies 
Developer: ncrocfer, Project: clf, Lines: 12, Source: api.py

Example 8: make_soup

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def make_soup(url):  # pragma: no cover
    """Make soup, that is basically parsing the html document."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())

    html = response.content
    return BeautifulSoup(html, "html.parser") 
Developer: NITDgpOS, Project: UIP, Lines: 12, Source: scrape.py

Example 9: make_json

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def make_json(url):  # pragma: no cover
    """Make a dictionary out of a json file."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())

    json_file = response.text
    data = json.loads(json_file)
    return data 
Developer: NITDgpOS, Project: UIP, Lines: 13, Source: scrape.py

Example 10: fetch_ktools_tar

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def fetch_ktools_tar(self, location, url, attempts=3, timeout=15, cooldown=1):
        last_error = None
        proxy_config = urlrequest.getproxies()
        self.announce('Retrieving ktools from: {}'.format(url), INFO)
        self.announce('Proxy configuration: {}'.format(proxy_config), INFO)

        if proxy_config:
            # Handle Proxy config
            proxy_handler = urlrequest.ProxyHandler(proxy_config)
            opener = urlrequest.build_opener(proxy_handler)
            urlrequest.install_opener(opener)

        for i in range(attempts):
            try:
                if proxy_config:
                    # Proxied connection
                    req = urlrequest.urlopen(urlrequest.Request(url), timeout=timeout)
                    break
                else:
                    # Non proxied connection
                    req = urlrequest.urlopen(url, timeout=timeout)
                    break

            except URLError as e:
                self.announce('Fetch ktools tar failed: {} (attempt {})'.format(e, (i+1)), WARN)
                last_error = e
                sleep(cooldown)
        else:
            self.announce('Failed to get ktools tar after {} attempts'.format(attempts), ERROR)
            if last_error:
                raise last_error

        with open(location, 'wb') as f:
            f.write(req.read()) 
Developer: OasisLMF, Project: OasisLMF, Lines: 36, Source: setup.py

Example 11: get_soap_client

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def get_soap_client(wsdlurl, timeout=30):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached. The
    timeout is in seconds."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if (wsdlurl, timeout) not in _soap_clients:
        # try zeep first
        try:
            from zeep.transports import Transport
            transport = Transport(timeout=timeout)
            from zeep import CachingClient
            client = CachingClient(wsdlurl, transport=transport).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl, transport=transport).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(
                        wsdlurl, proxy=getproxies(), timeout=timeout).service
                except ImportError:
                    # use pysimplesoap as last resort
                    try:
                        from pysimplesoap.client import SoapClient
                        client = SoapClient(
                            wsdl=wsdlurl, proxy=getproxies(), timeout=timeout)
                    except ImportError:
                        raise ImportError(
                            'No SOAP library (such as zeep) found')
        _soap_clients[(wsdlurl, timeout)] = client
    return _soap_clients[(wsdlurl, timeout)] 
Developer: guohuadeng, Project: odoo13-x64, Lines: 41, Source: util.py

Example 12: update_proxy

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
        if proxy_from_env and not proxy:
            proxy_url = getproxies().get(self.original_url.scheme)
            proxy = URL(proxy_url) if proxy_url else None
        if proxy and not proxy.scheme == 'http':
            raise ValueError("Only http proxies are supported")
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth 
Developer: skylander86, Project: lambda-text-extractor, Lines: 12, Source: client_reqrep.py

Example 13: get_soap_client

# Required import: from urllib import request [as alias]
# Or: from urllib.request import getproxies [as alias]
def get_soap_client(wsdlurl):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if wsdlurl not in _soap_clients:
        # try zeep first
        try:
            from zeep import CachingClient
            client = CachingClient(wsdlurl).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(wsdlurl, proxy=getproxies()).service
                except ImportError:
                    # use pysimplesoap as last resort
                    from pysimplesoap.client import SoapClient
                    client = SoapClient(wsdl=wsdlurl, proxy=getproxies())
        _soap_clients[wsdlurl] = client
    return _soap_clients[wsdlurl] 
Developer: guohuadeng, Project: odoo12-x64, Lines: 32, Source: util.py


Note: The urllib.request.getproxies examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and distribution or use should follow the license of the corresponding project. Do not reproduce without permission.