This article collects typical usage examples of the urllib.urlcleanup method in Python. If you have been wondering what urllib.urlcleanup does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the urllib module.
Ten code examples of urllib.urlcleanup are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
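Before diving in, here is a minimal sketch of what urlcleanup is for (Python 2, the version these examples target; the URL is a placeholder, not taken from any example below):

import urllib

# urlretrieve() may leave the download behind in a cached temporary file
filename, headers = urllib.urlretrieve("http://example.com/data.txt")
# urlcleanup() removes any temporary files created by earlier urlretrieve() calls
urllib.urlcleanup()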
Example 1: refresh_overview
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def refresh_overview(self):
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            self.overview = mlbgame.overview(self.current_game().game_id)
            self.__update_layout_state()
            self.needs_refresh = False
            self.print_overview_debug()
            self.network_issues = False
            break
        except URLError as e:
            self.network_issues = True
            debug.error("Networking Error while refreshing the current overview. {} retries remaining.".format(attempts_remaining))
            debug.error("URLError: {}".format(e.reason))
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
        except ValueError:
            self.network_issues = True
            debug.error("Value Error while refreshing current overview. {} retries remaining.".format(attempts_remaining))
            debug.error("ValueError: Failed to refresh overview for {}".format(self.current_game().game_id))
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
    # If we run out of retries, just move on to the next game
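Note that this example, like the others on this page, is Python 2 code. In Python 3 the function lives in urllib.request; a minimal sketch of the equivalent call:

from urllib import request

request.urlcleanup()  # clears the temporary-file cache left by urlretrieve() in Python 3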
Example 2: download_file
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def download_file(url, path, attempt=0):
    try:
        urllib.urlretrieve(url, path)
        urllib.urlcleanup()
    except Exception as e:
        if attempt != 3:
            attempt += 1
            print("[E] ({:d}) Download failed: {:s}.".format(attempt, str(e)))
            print("[W] Trying again in 5 seconds.")
            time.sleep(5)
            download_file(url, path, attempt)
        else:
            print("[E] Retry failed three times, skipping file.")
            print('-' * 70)
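A hypothetical call (the URL and path are placeholders): on failure the function sleeps five seconds and retries itself, giving up after three attempts:

download_file("http://example.com/archive.zip", "/tmp/archive.zip")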
Example 3: dash_R_cleanup
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def dash_R_cleanup(fs, ps, pic):
    import gc, copy_reg
    import _strptime, linecache, dircache
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created
    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    struct._cache.clear()
    filecmp._cache.clear()
    doctest.master = None
    # Collect cyclic trash.
    gc.collect()
Example 4: download_file
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def download_file(url, destfile):
    """
    download_file: download from @url and save locally as @destfile.
    @url the source file to download.
    @destfile the destination file on the local machine.
    """
    file_url = url
    try:
        print("--> Downloading file: %s" % file_url)
        filename, msg = urllib.urlretrieve(
            #'http://code.jquery.com/jquery-2.1.1.js',
            file_url,
            reporthook=reporthook)
        print ""
        #print "File:", filename
        print "Header:"
        print msg
        if os.path.exists(filename):
            if os.path.exists(destfile):
                now = currenttime()
                tmpfile = "%s.%s" % (destfile, now)
                shutil.move(destfile, tmpfile)
            shutil.move(filename, destfile)
        #print 'File exists before cleanup:', os.path.exists(filename)
    finally:
        urllib.urlcleanup()
        #print 'File still exists:', os.path.exists(filename)
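A hypothetical call, reusing the URL from the commented-out line above; because the cleanup sits in a finally block, urlcleanup() runs even when the download raises:

download_file('http://code.jquery.com/jquery-2.1.1.js', 'jquery-2.1.1.js')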
Example 5: http
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def http(fetch):
    """Decorator for downloading files from HTTP sites."""
    @wraps(fetch)
    def wrapper(*args, **kwargs):
        url, bbox, dt = fetch(*args, **kwargs)
        outpath = tempfile.mkdtemp()
        filename = url.format(dt.year, dt.month, dt.day)
        try:
            lfilename = filename.split("/")[-1]
            urllib.urlcleanup()
            urllib.urlretrieve(filename, "{0}/{1}".format(outpath, lfilename))
        except:
            lfilename = None
        return outpath, lfilename, bbox, dt
    return wrapper
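A hypothetical fetcher wrapped by this decorator (the function name, URL template, and arguments are illustrative only). The decorated function returns a URL template plus its metadata; the wrapper fills in the date, clears the urlretrieve cache, and downloads into a fresh temporary directory:

from datetime import datetime

@http
def fetch_grid(bbox, dt):
    # illustrative date-stamped URL template: {0}=year, {1}=month, {2}=day
    url = "http://example.com/grids/{0}/{1:02d}/{2:02d}.nc"
    return url, bbox, dt

outpath, lfilename, bbox, dt = fetch_grid((-100, 30, -90, 40), datetime(2020, 1, 15))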
Example 6: refresh_games
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def refresh_games(self):
    debug.log("Updating games for {}/{}/{}".format(self.month, self.day, self.year))
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            current_day = self.day
            self.set_current_date()
            all_games = mlbgame.day(self.year, self.month, self.day)
            if self.config.rotation_only_preferred:
                self.games = self.__filter_list_of_games(all_games, self.config.preferred_teams)
            else:
                self.games = all_games
            if current_day != self.day:
                self.current_game_index = self.game_index_for_preferred_team()
            self.games_refresh_time = time.time()
            self.network_issues = False
            break
        except URLError as e:
            self.network_issues = True
            debug.error("Networking error while refreshing the master list of games. {} retries remaining.".format(attempts_remaining))
            debug.error("URLError: {}".format(e.reason))
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
        except ValueError:
            self.network_issues = True
            debug.error("Value Error while refreshing master list of games. {} retries remaining.".format(attempts_remaining))
            debug.error("ValueError: Failed to refresh list of games")
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
Example 7: fetch_preferred_team_overview
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def fetch_preferred_team_overview(self):
    if not self.is_offday_for_preferred_team():
        urllib.urlcleanup()
        game = self.games[self.game_index_for_preferred_team()]
        game_overview = mlbgame.overview(game.game_id)
        debug.log("Preferred Team's Game Status: {}, {} {}".format(game_overview.status, game_overview.inning_state, game_overview.inning))
        return game_overview
Example 8: dash_R_cleanup
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created
    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__
    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)
    # clear type cache
    sys._clear_type_cache()
    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()
    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()
    # Collect cyclic trash.
    gc.collect()
Example 9: get_request
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def get_request(test):
    # perform GET request
    urllib.urlcleanup()  # clear cache
    try:
        user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        req_headers = {'User-Agent': user_agent}
        for each in test['headers']:
            key, val = each.split(":", 1)
            key = key.lstrip()
            val = val.lstrip()
            req_headers[key] = val
        if test['requestCookie'] or test['requestCSRF']:
            # request cookie and csrf token if set in module XML
            cookie_val, csrf_val = request_value(test)
            if cookie_val:
                req_headers['cookie'] = cookie_val
            if csrf_val:
                # replace <CSRFTOKEN> with the collected token
                test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
                test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
                test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]
        if opts.debug:
            # print debug output
            print textwrap.fill((" [ ] URL (GET): %s" % test['url']),
                                initial_indent='', subsequent_indent=' -> ', width=80)
            print
        # assign NullHTTPErrorProcessor as default opener
        opener = urllib2.build_opener(NullHTTPErrorProcessor())
        urllib2.install_opener(opener)
        req = urllib2.Request(test['url'], headers=req_headers)
        f = urllib2.urlopen(req)
        r_body = f.read()
        r_info = f.info()
        f.close()
        # handle instances where the response body is 0 bytes in length
        if not r_body:
            print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" % test['name']
            r_body = "<Scythe Message: Empty response from server>"
        # return updated test and response data
        return test, r_body, r_info, req
    except Exception:
        print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" % test['url']),
                            initial_indent='', subsequent_indent='\t', width=80)
        if opts.debug:
            for ex in traceback.format_exc().splitlines():
                print textwrap.fill((" %s" % str(ex)), initial_indent='', subsequent_indent='\t', width=80)
            print
        return test, False, False, req
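A hypothetical test dictionary showing the keys this function reads (the values are placeholders; the real structure comes from the tool's module XML):

test = {
    'name': 'example-check',
    'url': 'http://example.com/login',
    'headers': ['Accept: text/html'],
    'requestCookie': False,
    'requestCSRF': False,
    'postParameters': '',
}
test, r_body, r_info, req = get_request(test)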
Example 10: dash_R_cleanup
# Required import: import urllib [as alias]
# Or: from urllib import urlcleanup [as alias]
def dash_R_cleanup(fs, ps, pic, abcs):
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created
    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__
    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    # clear type cache
    sys._clear_type_cache()
    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()
    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    # Collect cyclic trash.
    gc.collect()