This article collects typical usage examples of the pywikibot.comms.http.fetch function in Python. If you are wondering how to use fetch, what it does, or want real-world examples, the curated code samples below may help.
The following presents 15 code examples of the fetch function, ordered by popularity by default.
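Before the examples, here is a minimal sketch of the basic call shape. It assumes a pywikibot checkout where the response object exposes the .status, .content and .raw attributes used throughout the examples below; these attribute names have varied across pywikibot releases.

from pywikibot.comms import http

# Plain GET request; fetch returns a response object.
r = http.fetch(uri='https://www.wikipedia.org/')
print(r.status)         # HTTP status code, e.g. 200
print(type(r.content))  # decoded text body
print(type(r.raw))      # undecoded bytes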
Example 1: test_fetch
def test_fetch(self):
    """Test that using the data parameter and body parameter produce same results."""
    r_data = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',
                        data={'fish&chips': 'delicious'})
    r_body = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',
                        body={'fish&chips': 'delicious'})
    self.assertDictEqual(json.loads(r_data.content),
                         json.loads(r_body.content))
Example 2: test_follow_redirects
def test_follow_redirects(self):
    """Test that 301 redirects are followed correctly."""
    # The following will redirect from ' ' -> '_', and maybe to https://
    r = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
    self.assertEqual(r.status, 200)
    self.assertIsNotNone(r.data.history)
    self.assertIn('//en.wikipedia.org/wiki/Main_Page', r.data.url)
    r = http.fetch(uri='http://www.gandi.eu')
    self.assertEqual(r.status, 200)
    self.assertEqual(r.data.url, 'http://www.gandi.net')
Example 3: getWebCitationURL
def getWebCitationURL(url, timestamp=None):
    """Return archived URL by Web Citation.

    See http://www.webcitation.org/doc/WebCiteBestPracticesGuide.pdf
    for more details.

    @param url: url to search an archived version for
    @param timestamp: requested archive date. The version closest to that
        moment is returned. Format: YYYYMMDDhhmmss or part thereof.
    """
    uri = u'http://www.webcitation.org/query?'
    query = {'returnxml': 'true',
             'url': url}
    if timestamp is not None:
        query['date'] = timestamp
    uri = uri + urlencode(query)
    xmltext = http.fetch(uri).content
    if "success" in xmltext:
        data = ET.fromstring(xmltext)
        return data.find('.//webcite_url').text
    else:
        return None
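A hypothetical call, for illustration only (the URL and timestamp are made up; per the docstring, the service returns the snapshot closest to the requested date):

archived = getWebCitationURL('https://www.example.org/',
                             timestamp='20150101000000')
if archived:
    print('Archived copy:', archived)
else:
    print('No WebCitation snapshot found.')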
Example 4: langs
def langs(self):
    """Build interwikimap."""
    response = fetch(self.api + '?action=query&meta=siteinfo'
                     '&siprop=interwikimap&sifilteriw=local&format=json')
    iw = json.loads(response.content)
    if "error" in iw:
        raise RuntimeError("%s - %s" % (iw["error"]["code"], iw["error"]["info"]))
    return [wiki for wiki in iw["query"]["interwikimap"] if "language" in wiki]
Example 5: _ocr_callback
def _ocr_callback(self, cmd_uri, parser_func=None):
    """OCR callback function.

    @return: tuple (error, text [error description in case of error]).
    """
    def id(x):
        return x

    if not cmd_uri:
        raise ValueError('Parameter cmd_uri is mandatory.')

    if parser_func is None:
        parser_func = id

    if not callable(parser_func):
        raise TypeError('Keyword parser_func must be callable.')

    # A wrong link fails with an exception.
    try:
        response = http.fetch(cmd_uri, charset='utf-8')
    except Exception as e:
        pywikibot.error('Querying %s: %s' % (cmd_uri, e))
        return (True, e)

    data = json.loads(response.content)

    assert 'error' in data, 'Error from phe-tools: %s' % data
    assert data['error'] in [0, 1], 'Error from phe-tools: %s' % data

    error = bool(data['error'])
    if error:
        pywikibot.error('Querying %s: %s' % (cmd_uri, data['text']))
        return (error, data['text'])
    else:
        return (error, parser_func(data['text']))
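The callback's contract is an (error, text) tuple, where text holds the error description when error is True. A hypothetical sketch of a caller, for illustration only (the bot and cmd_uri names are invented; _ocr_callback is a private method, so real callers live inside the same class):

error, text = bot._ocr_callback(cmd_uri, parser_func=str.strip)
if error:
    pywikibot.error('OCR failed: %s' % text)
else:
    print(text)  # the parsed OCR text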
Example 6: getInternetArchiveURL
def getInternetArchiveURL(url, timestamp=None):
    """Return archived URL by Internet Archive.

    See [[:mw:Archived Pages]] and https://archive.org/help/wayback_api.php
    for more details.

    @param url: url to search an archived version for
    @param timestamp: requested archive date. The version closest to that
        moment is returned. Format: YYYYMMDDhhmmss or part thereof.
    """
    import json
    uri = u'https://archive.org/wayback/available?'
    query = {'url': url}
    if timestamp is not None:
        query['timestamp'] = timestamp
    uri = uri + urlencode(query)
    jsontext = http.fetch(uri).content
    if "closest" in jsontext:
        data = json.loads(jsontext)
        return data['archived_snapshots']['closest']['url']
    else:
        return None
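For reference, a successful Wayback lookup returns JSON of roughly this shape; this is a hand-written sketch based only on the keys the function reads, not captured output:

# {'archived_snapshots': {'closest': {'available': True,
#                                     'status': '200',
#                                     'timestamp': '20150101000000',
#                                     'url': 'http://web.archive.org/web/...'}}}
snapshot = getInternetArchiveURL('https://www.example.org/',
                                 timestamp='20150101000000')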
Example 7: test_https_cert_error
def test_https_cert_error(self):
    """Test if http.fetch respects disable_ssl_certificate_validation."""
    self.assertRaisesRegex(pywikibot.FatalServerError,
                           self.CERT_VERIFY_FAILED_RE,
                           http.fetch,
                           uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
    http.session.close()  # clear the connection

    with warnings.catch_warnings(record=True) as warning_log:
        response = http.fetch(
            uri='https://testssl-expire-r2i2.disig.sk/index.en.html',
            disable_ssl_certificate_validation=True)
    r = response.content
    self.assertIsInstance(r, unicode)
    self.assertTrue(re.search(r'<title>.*</title>', r))
    http.session.close()  # clear the connection

    # Verify that it now fails again
    self.assertRaisesRegex(pywikibot.FatalServerError,
                           self.CERT_VERIFY_FAILED_RE,
                           http.fetch,
                           uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
    http.session.close()  # clear the connection

    # Verify that the warning occurred
    self.assertIn('InsecureRequestWarning',
                  [w.category.__name__ for w in warning_log])
Example 8: test_no_params
def test_no_params(self):
    """Test fetch method with no parameters."""
    r = http.fetch(uri=self.get_httpbin_url('/get'), params={})
    self.assertEqual(r.status, 200)
    content = json.loads(r.content)
    self.assertDictEqual(content['args'], {})
Example 9: getOpenStreetMap
def getOpenStreetMap(latitude, longitude):
    """
    Get the result from https://nominatim.openstreetmap.org/reverse .

    @rtype: list of strings
    """
    result = []
    gotInfo = False
    parameters = urlencode({'lat': latitude, 'lon': longitude,
                            'accept-language': 'en'})
    while not gotInfo:
        try:
            page = fetch('https://nominatim.openstreetmap.org/reverse'
                         '?format=xml&%s' % parameters)
            et = xml.etree.ElementTree.fromstring(page.content)
            gotInfo = True
        except IOError:
            pywikibot.output(u'Got an IOError, let\'s try again')
            time.sleep(30)
        except socket.timeout:
            pywikibot.output(u'Got a timeout, let\'s try again')
            time.sleep(30)
    validParts = [u'hamlet', u'village', u'city', u'county', u'country']
    invalidParts = [u'path', u'road', u'suburb', u'state', u'country_code']
    addressparts = et.find('addressparts')
    for addresspart in addressparts.getchildren():
        if addresspart.tag in validParts:
            result.append(addresspart.text)
        elif addresspart.tag in invalidParts:
            pywikibot.output(u'Dropping %s, %s'
                             % (addresspart.tag, addresspart.text))
        else:
            pywikibot.warning('%s, %s is not in addressparts lists'
                              % (addresspart.tag, addresspart.text))
    return result
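A hypothetical reverse-geocoding call (coordinates invented for illustration; only tags in validParts end up in the result, from the smallest to the largest administrative unit):

parts = getOpenStreetMap(52.37, 4.89)
print(', '.join(parts))  # e.g. 'Amsterdam, Netherlands'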
Example 10: getDataFromHost
def getDataFromHost(self, queryStr):
    """
    Go and fetch a query from the host's API.

    @rtype: dict
    """
    url = self.getUrl(queryStr)
    try:
        resp = http.fetch(url)
    except Exception:
        pywikibot.warning(u"Failed to retrieve %s" % url)
        raise
    data = resp.content
    if not data:
        pywikibot.warning('No data received for %s' % url)
        raise pywikibot.ServerError('No data received for %s' % url)
    try:
        data = json.loads(data)
    except ValueError:
        pywikibot.warning(
            'Data received for %s but no JSON could be decoded: %r'
            % (url, data))
        raise pywikibot.ServerError(
            'Data received for %s but no JSON could be decoded: %r'
            % (url, data))
    return data
Example 11: github_svn_rev2hash
def github_svn_rev2hash(tag, rev):
    """Convert a Subversion revision to a Git hash using GitHub.

    @param tag: name of the Subversion repo on GitHub
    @param rev: Subversion revision identifier
    @return: the git hash and the commit date
    @rtype: tuple of (str, time.struct_time)
    """
    from io import StringIO
    import xml.dom.minidom
    from pywikibot.comms import http

    uri = 'https://github.com/wikimedia/%s/!svn/vcc/default' % tag
    request = http.fetch(uri=uri, method='PROPFIND',
                         body="<?xml version='1.0' encoding='utf-8'?>"
                              "<propfind xmlns=\"DAV:\"><allprop/></propfind>",
                         headers={'label': str(rev),
                                  'user-agent': 'SVN/1.7.5 {pwb}'})
    data = request.content
    dom = xml.dom.minidom.parse(StringIO(data))
    hsh = dom.getElementsByTagName("C:git-commit")[0].firstChild.nodeValue
    date = dom.getElementsByTagName("S:date")[0].firstChild.nodeValue
    date = time.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
    return hsh, date
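A hypothetical call (the repo tag and revision number are invented; assumes 'import time' at module level, as the function itself does):

hsh, date = github_svn_rev2hash('pywikibot-core', 11000)
print(hsh)                              # a 40-character git SHA-1
print(time.strftime('%Y-%m-%d', date))  # commit date of that revision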
Example 12: test_follow_redirects
def test_follow_redirects(self):
    """Test that 301 redirects work correctly after an exception."""
    # To be effective, this exception should be raised in httplib2.
    self.assertRaises(Exception,
                      http.fetch,
                      uri='invalid://url')
    # The following will redirect from ' ' -> '_', and maybe to https://
    r = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
    self.assertEqual(r.status, 200)
    self.assertIn('//en.wikipedia.org/wiki/Main_Page',
                  r.response_headers['content-location'])
    r = http.fetch(uri='http://www.gandi.eu')
    self.assertEqual(r.status, 200)
    self.assertEqual(r.response_headers['content-location'],
                     'http://www.gandi.net')
Example 13: test_fetch
def test_fetch(self):
    """Test http.fetch using http://www.wikipedia.org/."""
    r = http.fetch('http://www.wikipedia.org/')
    self.assertIsInstance(r, threadedhttp.HttpRequest)
    self.assertEqual(r.status, 200)
    self.assertIn('<html lang="mul"', r.content)
    self.assertIsInstance(r.content, unicode)
    self.assertIsInstance(r.raw, bytes)
Example 14: get_image_from_image_page
def get_image_from_image_page(imagePage):
    """Get the image object to work on, based on an imagePage object."""
    imageURL = imagePage.fileUrl()
    imageURLopener = http.fetch(imageURL)
    imageBuffer = io.BytesIO(imageURLopener.raw[:])
    image = Image.open(imageBuffer)
    return image
Example 15: downloadPhoto
def downloadPhoto(photoUrl=''):
    """
    Download the photo and store it in an io.BytesIO object.

    TODO: Add exception handling
    """
    imageFile = fetch(photoUrl).raw
    return io.BytesIO(imageFile)
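A hypothetical round trip combining Examples 14 and 15 (the URL is invented; PIL/Pillow's Image.open accepts the BytesIO buffer directly):

photo = downloadPhoto('https://example.org/photo.jpg')
image = Image.open(photo)
print(image.size)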