

Python urllib3.disable_warnings Method Code Examples

This article collects typical usage examples of the requests.packages.urllib3.disable_warnings method in Python. If you are unsure how to call urllib3.disable_warnings or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing module, requests.packages.urllib3.


The following presents 15 code examples of the urllib3.disable_warnings method, sorted by popularity by default.
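Before the collected examples, here is a minimal, self-contained usage sketch written for this article (it is not taken from any of the projects below, and the URL is a placeholder). It shows the common pattern: pass the InsecureRequestWarning class to disable_warnings so that requests made with verify=False no longer emit that warning.

import requests
from requests.packages import urllib3

# Suppress only the InsecureRequestWarning raised for unverified HTTPS requests
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Placeholder URL; verify=False skips certificate validation, which is what triggers the warning
response = requests.get('https://example.com', verify=False)
print(response.status_code)

Passing the specific warning class is usually preferable to calling disable_warnings() with no argument, which silences every urllib3 warning category.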

Example 1: download_tarball

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def download_tarball(tarball_url, verify=False, proxy_server=None):
    '''
    Downloads a tarball to /tmp and returns the path
    '''
    try:
        if not verify:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        kwargs = {}
        if proxy_server:
            kwargs['proxies'] = {
                'http': proxy_server,
                'https': proxy_server,
                'ftp': proxy_server
            }
        tarball_filename = tempfile.mkstemp(prefix='armada')[1]
        response = requests.get(tarball_url, verify=verify, **kwargs)

        with open(tarball_filename, 'wb') as f:
            f.write(response.content)

        return tarball_filename
    except Exception:
        raise source_exceptions.TarballDownloadException(tarball_url) 
Developer: airshipit, Project: armada, Lines: 25, Source: source.py

Example 2: download_tarball

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def download_tarball(tarball_url, verify=False):
    '''
    Downloads a tarball to /tmp and returns the path
    '''
    try:
        if not verify:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        tarball_filename = tempfile.mkstemp(prefix='armada')[1]
        response = requests.get(tarball_url, verify=verify)

        with open(tarball_filename, 'wb') as f:
            f.write(response.content)

        return tarball_filename
    except Exception:
        raise source_exceptions.TarballDownloadException(tarball_url) 
Developer: att-comdev, Project: armada, Lines: 19, Source: source.py

Example 3: get_html

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_html(url,submit_cookies):

    # Set request headers to mimic a real browser
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'Referer' : 'http://ui.ptlogin2.qq.com/cgi-bin/login?appid=549000912&s_url=http://qun.qq.com/member.html'
    }
    # Suppress HTTPS certificate warnings
    urllib3.disable_warnings()

    # Fetch the page via GET
    html = get(url, cookies = submit_cookies, headers=header, verify=False)

    return html


# Access a page via POST
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 19, Source: url_request.py

Example 4: get_profile_picture

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_profile_picture(self, qq_number, size=100):
        # Fetch the avatar of the given QQ number; size may be 40, 100, or 140 (default 100)
        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer':'http://find.qq.com/'
        }

        # Fetch the page via GET
        html = get('http://q1.qlogo.cn/g?b=qq&nk=' + str(qq_number) + '&s=' + str(size), headers=header, verify=False)
        return html.content 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 18, Source: qq_bot.py

Example 5: get_quit_of_group

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_quit_of_group(self):
        # Fetch groups the account left within the last 30 days
        # Data to submit
        # bkn is derived from skey via a separate hashing function
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        submit_data = {'bkn': str(bkn)}

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'text/plain',
            'origin': 'https://huifu.qq.com',
            'referer' : 'https://huifu.qq.com/recovery/index.html?frag=0'
        }

        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()
        # Fetch the page via POST
        html = post('https://huifu.qq.com/cgi-bin/gr_grouplist', data=submit_data, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the response into a Python object
        result = loads(html.text)

        return result 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 27, Source: qq_bot.py

Example 6: get_pay_for_another

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_pay_for_another(self):
        # Fetch orders this account paid on behalf of others
        # Data to submit
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://pay.qq.com/cgi-bin/personal/account_msg.cgi?p=0.6796416908412624&cmd=1&sck=' + get_sck(skey) + '&type=100&showitem=2&per=100&pageno=1&r=0.3177912609760205'

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://pay.qq.com/infocenter/infocenter.shtml?asktype=100',
            'Connection': 'keep-alive'
        }

        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()
        # Fetch the page via GET
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the response into a Python object
        result = loads(html.text)
        # print(result)

        return result['resultinfo']['list'] 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 27, Source: qq_bot.py

Example 7: __init__

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def __init__(self, base_url, login, password, verify_tls=True, timeout=None, **options):
        self.options = options
        self.key_patterns = {prepare_regex(metric['key']): metric
                             for metric in options.get('metrics', [])}

        self.zapi = pyzabbix.ZabbixAPI(base_url, timeout=timeout)
        if not verify_tls:
            import requests.packages.urllib3 as urllib3
            urllib3.disable_warnings()
            self.zapi.session.verify = verify_tls

        def measure_api_request(r, *args, **kwargs):
            api_requests_total.inc()
            api_bytes_total.inc(len(r.content))
            api_seconds_total.inc(r.elapsed.total_seconds())
        self.zapi.session.hooks = {'response': measure_api_request}

        self.zapi.login(login, password)

        self.host_mapping = {row['hostid']: row['name']
                             for row in self.zapi.host.get(output=['hostid', 'name'])} 
Developer: MyBook, Project: zabbix-exporter, Lines: 23, Source: core.py

Example 8: __init__

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def __init__(self, version, address='https://index.docker.io', **kwargs):
        urllib3.disable_warnings()
        self._instances[id(self)] = self
        self.scheme       = utils.parse_http_scheme(address)
        self.location     = utils.parse_hostname(address)
        self._api_version = version
        self._tls         = {}

        if kwargs.get('ssl_cert_path'):
            self.tls = kwargs['ssl_cert_path']

        # prepare session
        self.session = requests.Session()

        # set up certs.
        self.session.verify = self.tls.get('ca_path', kwargs.get('verify', True))
        self.session.cert = (self.tls['ssl_cert_path'], self.tls['ssl_key_path']) if self.tls else None
        self.auth = kwargs.get('authentication', kwargs.get('auth', None))
        if self.auth:
            self.session.auth = (self.auth.user, self.auth.passwd) 
Developer: TUNE-Archive, Project: freight_forwarder, Lines: 22, Source: registry_base.py

Example 9: __init__

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def __init__(self):
        self.state = Session.STATE_INIT
        self.credentials = None
        self.session = None
        self.auth = None
        self.retrying = False	# Avoid infinite loop when successful auth / unsuccessful query

        # yuck suppress InsecurePlatformWarning under Python < 2.7.9 which lacks SNI support
        if sys.version_info < (2,7,9):
            from requests.packages import urllib3
            urllib3.disable_warnings()

        if getattr(sys, 'frozen', False):
            os.environ['REQUESTS_CA_BUNDLE'] = join(config.respath, 'cacert.pem') 
Developer: EDCD, Project: EDMarketConnector, Lines: 16, Source: companion.py

Example 10: init_session

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def init_session(cls):
        if "Authorization" not in cls._session.headers and config.api_token:
            cls._session.headers.update({"Authorization": "Bearer {}".format(config.api_token)})
        if cls._session.cert is None and config.cert:
            cls._session.cert = config.cert
        cls._session.verify = config.verify_ssl
        if not config.verify_ssl:
            import requests.packages.urllib3 as urllib3
            urllib3.disable_warnings() 
Developer: fiaas, Project: k8s, Lines: 11, Source: client.py

Example 11: disable_ssl_cert_check

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def disable_ssl_cert_check(self):
        urllib3.disable_warnings(InsecureRequestWarning)
        self.s.verify = False 
Developer: sparkcognition, Project: darwin-sdk, Lines: 5, Source: sdk.py

Example 12: post_html

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def post_html(url,submit_cookies,submit_data):

    # Set request headers to mimic a real browser
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'Referer' : 'https://qun.qq.com/member.html'
    }
    # Suppress HTTPS certificate warnings
    urllib3.disable_warnings()

    # Fetch the page via POST
    html = post(url, data=submit_data, cookies = submit_cookies, headers=header, verify=False)

    return html 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 16, Source: url_request.py

Example 13: get_info_in_qq_friend

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_info_in_qq_friend(self,qq_number):

        # Fetch detailed info about a specific QQ friend

        # Data to submit
        # bkn is derived from skey via a separate hashing function
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        submit_data = {'keyword':str(qq_number), 'ldw': str(bkn), 'num':'20', 'page':'0', 'sessionid':'0', 'agerg':'0', 'sex':'0', 'firston':'0', 'video':'0', 'country':'1', 'province':'65535', 'city':'0', 'district':'0', 'hcountry':'1', 'hprovince':'0', 'hcity':'0', 'hdistrict':'0', 'online':'0'}

        # Cookies to submit
        # cookies = {'uin':self.cookies_merge_dict_in_qun_qq_com['uin'], 'skey':self.cookies_merge_dict_in_qun_qq_com['skey'], 'ptisp':self.cookies_merge_dict_in_qun_qq_com['ptisp'], 'RK':self.cookies_merge_dict_in_qun_qq_com['RK'], 'ptcz':self.cookies_merge_dict_in_qun_qq_com['ptcz']}

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Origin': 'http://find.qq.com',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer':'http://find.qq.com/',
        }

        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()
        # Fetch the page via POST
        html = post('http://cgi.find.qq.com/qqfind/buddy/search_v3', data=submit_data, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the friend info into a Python object
        friend_info = loads(html.text)
        # print(friend_info)
        return friend_info['result']['buddy']['info_list'][0] 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 32, Source: qq_bot.py

Example 14: is_vip_svip

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def is_vip_svip(self):
        # Check whether the logged-in QQ account is VIP or SVIP
        # Data to submit
        # bkn is derived from skey via a separate hashing function
        bkn = hash33_bkn(self.cookies_merge_dict_in_qun_qq_com['skey'])
        qq_number = str(self.qq_number)
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://proxy.vip.qq.com/cgi-bin/srfentry.fcgi?bkn=' + str(bkn) + '&ts=&g_tk=' + str(bkn) + '&data={"11053":{"iAppId":1,"iKeyType":1,"sClientIp":"","sSessionKey":"' + skey + '","sUin":"' + qq_number + '"}}'

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://huifu.qq.com/recovery/index.html?frag=1',
            'Origin': 'https://huifu.qq.com',
            'Connection': 'close'
        }

        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()
        # Fetch the page via GET
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the response into a Python object
        result = loads(html.text)
        isSvip = result['11053']['data']['isSvip']
        isVip = result['11053']['data']['isVip']
        return {'isSvip':isSvip, 'isVip':isVip} 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 31, Source: qq_bot.py

Example 15: get_qb

# Required import: from requests.packages import urllib3 [as alias]
# Or: from requests.packages.urllib3 import disable_warnings [as alias]
def get_qb(self):
        # Fetch the account's QB (Q coin) balance
        # Data to submit
        qq_number = str(self.qq_number)
        skey = str(self.cookies_merge_dict_in_qun_qq_com['skey'])
        url = 'https://api.unipay.qq.com/v1/r/1450000186/wechat_query?cmd=4&pf=vip_m-pay_html5-html5&pfkey=pfkey&from_h5=1&from_https=1&openid=' + qq_number + '&openkey=' + skey + '&session_id=uin&session_type=skey'

        # Set request headers to mimic a real browser
        header = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://my.pay.qq.com/account/index.shtml',
            'Origin': 'https://my.pay.qq.com',
            'Connection': 'close'
        }

        # Suppress HTTPS certificate warnings
        urllib3.disable_warnings()
        # Fetch the page via GET
        html = get(url, cookies=self.cookies_merge_dict_in_qun_qq_com, headers=header, verify=False)

        # Parse the response into a Python object
        result = loads(html.text)

        qb_value = float(result['qb_balance']) / 10
        return qb_value 
Developer: shengqiangzhang, Project: examples-of-web-crawlers, Lines: 29, Source: qq_bot.py


Note: The requests.packages.urllib3.disable_warnings method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please follow the license of the corresponding project. Do not reproduce without permission.