This page collects typical usage examples of the parse_url function from Python's urllib.parse module. If you are wondering what parse_url does, how to call it, or what real-world code that uses it looks like, the hand-picked examples below should help.
A total of 15 code examples of parse_url are shown, sorted by popularity by default.
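The standard library itself exposes this parser as urllib.parse.urlparse; the examples below generally import it under the alias parse_url. A minimal sketch (the URL is made up) of the fields the parsed result exposes:

from urllib.parse import urlparse as parse_url

parts = parse_url('https://example.com:8080/data/items?id=42')
print(parts.scheme)    # 'https'
print(parts.netloc)    # 'example.com:8080'
print(parts.hostname)  # 'example.com'
print(parts.port)      # 8080
print(parts.path)      # '/data/items'
print(parts.query)     # 'id=42'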
Example 1: do_GET
def do_GET(self):
    request = parse_url(self.path).path.split('/')[1]
    query_vars = dict(parse_query(parse_url(self.path).query))
    try:
        if self.path not in response_cache:
            info_getter = getattr(info, 'get_' + request)
            response_cache[self.path] = info_getter(**query_vars)
        self.respond(response_cache[self.path])
    except AttributeError:
        self.not_found()
    except NotImplementedError:
        self.not_implemented()
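Assuming parse_query is urllib.parse.parse_qsl imported under another name, the path/query split in the handler above works roughly like this (the request path is hypothetical):

from urllib.parse import urlparse as parse_url, parse_qsl as parse_query

path = '/weather?city=Berlin&units=metric'
request = parse_url(path).path.split('/')[1]           # 'weather'
query_vars = dict(parse_query(parse_url(path).query))  # {'city': 'Berlin', 'units': 'metric'}
print(request, query_vars)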
Example 2: get_filepath_or_buffer
def get_filepath_or_buffer(filepath_or_buffer, encoding=None,
                           compression=None):
    # Assuming AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_S3_HOST
    # are environment variables
    parsed_url = parse_url(filepath_or_buffer)
    s3_host = os.environ.get('AWS_S3_HOST', 's3.amazonaws.com')
    try:
        conn = boto.connect_s3(host=s3_host)
    except boto.exception.NoAuthHandlerFound:
        conn = boto.connect_s3(host=s3_host, anon=True)
    b = conn.get_bucket(parsed_url.netloc, validate=False)
    if compat.PY2 and (compression == 'gzip' or
                       (compression == 'infer' and
                        filepath_or_buffer.endswith(".gz"))):
        k = boto.s3.key.Key(b, parsed_url.path)
        filepath_or_buffer = BytesIO(k.get_contents_as_string(
            encoding=encoding))
    else:
        k = BotoFileLikeReader(b, parsed_url.path, encoding=encoding)
        k.open('r')  # Expose read errors immediately
        filepath_or_buffer = k
    return filepath_or_buffer, None, compression
Example 3: get_filepath_or_buffer
def get_filepath_or_buffer(filepath_or_buffer, encoding=None):
    """
    If the filepath_or_buffer is a url, translate and return the buffer;
    otherwise pass it through unchanged.

    Parameters
    ----------
    filepath_or_buffer : a url, filepath, or buffer
    encoding : the encoding to use to decode py3 bytes, default is 'utf-8'

    Returns
    -------
    a filepath_or_buffer, the encoding
    """
    if _is_url(filepath_or_buffer):
        req = _urlopen(str(filepath_or_buffer))
        return maybe_read_encoded_stream(req, encoding)

    if _is_s3_url(filepath_or_buffer):
        try:
            import boto
        except ImportError:
            raise ImportError("boto is required to handle s3 files")
        # Assuming AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
        # are environment variables
        parsed_url = parse_url(filepath_or_buffer)
        conn = boto.connect_s3()
        b = conn.get_bucket(parsed_url.netloc)
        k = boto.s3.key.Key(b)
        k.key = parsed_url.path
        filepath_or_buffer = StringIO(k.get_contents_as_string())
        return filepath_or_buffer, None

    return filepath_or_buffer, None
Example 4: get_filepath_or_buffer
def get_filepath_or_buffer(filepath_or_buffer, encoding=None,
                           compression=None):
    """
    If the filepath_or_buffer is a url, translate and return the buffer;
    otherwise pass it through unchanged.

    Parameters
    ----------
    filepath_or_buffer : a url, filepath, or buffer
    encoding : the encoding to use to decode py3 bytes, default is 'utf-8'

    Returns
    -------
    a filepath_or_buffer, the encoding, the compression
    """
    if _is_url(filepath_or_buffer):
        req = _urlopen(str(filepath_or_buffer))
        if compression == 'infer':
            content_encoding = req.headers.get('Content-Encoding', None)
            if content_encoding == 'gzip':
                compression = 'gzip'
            else:
                compression = None
        # tack the compression onto the tuple returned by the helper
        to_return = (list(maybe_read_encoded_stream(req, encoding, compression)) +
                     [compression])
        return tuple(to_return)

    if _is_s3_url(filepath_or_buffer):
        try:
            import boto
        except ImportError:
            raise ImportError("boto is required to handle s3 files")
        # Assuming AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
        # are environment variables
        parsed_url = parse_url(filepath_or_buffer)
        try:
            conn = boto.connect_s3()
        except boto.exception.NoAuthHandlerFound:
            conn = boto.connect_s3(anon=True)
        b = conn.get_bucket(parsed_url.netloc, validate=False)
        if compat.PY2 and (compression == 'gzip' or
                           (compression == 'infer' and
                            filepath_or_buffer.endswith(".gz"))):
            k = boto.s3.key.Key(b, parsed_url.path)
            filepath_or_buffer = BytesIO(k.get_contents_as_string(
                encoding=encoding))
        else:
            k = BotoFileLikeReader(b, parsed_url.path, encoding=encoding)
            k.open('r')  # Expose read errors immediately
            filepath_or_buffer = k
        return filepath_or_buffer, None, compression

    return _expand_user(filepath_or_buffer), None, compression
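In the S3 branch above, parse_url is what splits the URL into the bucket name (netloc) and the object key (path). A quick sketch with a made-up URL:

from urllib.parse import urlparse as parse_url

parsed_url = parse_url('s3://my-bucket/some/prefix/data.csv.gz')
print(parsed_url.scheme)  # 's3'
print(parsed_url.netloc)  # 'my-bucket'                 -> used as the bucket name
print(parsed_url.path)    # '/some/prefix/data.csv.gz'  -> used as the key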
Example 5: _parse_host
def _parse_host(host, port, resource):
    if not host.startswith('http'):
        host = 'http://' + host
    url_pack = parse_url(host)
    is_secure = url_pack.scheme == 'https'
    port = port or url_pack.port or (443 if is_secure else 80)
    base_url = '%s:%d%s/%s/%s' % (
        url_pack.hostname, port, url_pack.path, resource, PROTOCOL_VERSION)
    return is_secure, base_url
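A self-contained sketch of the scheme and port fallback that _parse_host relies on, using hypothetical hosts (PROTOCOL_VERSION and the resource path are left out):

from urllib.parse import urlparse as parse_url

for host in ('api.example.com', 'https://api.example.com', 'http://api.example.com:8080'):
    if not host.startswith('http'):
        host = 'http://' + host
    url_pack = parse_url(host)
    is_secure = url_pack.scheme == 'https'
    port = url_pack.port or (443 if is_secure else 80)
    print(url_pack.hostname, is_secure, port)
# api.example.com False 80
# api.example.com True 443
# api.example.com False 8080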
Example 6: __init__
def __init__(self, base_url, adapter_set="", user="", password=""):
    self._base_url = parse_url(base_url)
    self._adapter_set = adapter_set
    self._user = user
    self._password = password
    self._session = {}
    self._subscriptions = {}
    self._current_subscription_key = 0
    self._stream_connection = None
    self._stream_connection_thread = None
Developer: femtotrader, Project: Lightstreamer-example-StockList-client-python, Lines: 10, Source: stock_list_demo.py
Example 7: _set_control_link_url
def _set_control_link_url(self, custom_address=None):
    """Set the address to use for the Control Connection
    in such cases where Lightstreamer is behind a Load Balancer.
    """
    if custom_address is None:
        self._control_url = self._base_url
    else:
        parsed_custom_address = parse_url("//" + custom_address)
        self._control_url = parsed_custom_address._replace(
            scheme=self._base_url[0]
        )
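The "//" prefix is what makes parse_url put the custom address into netloc instead of path, and self._base_url[0] is simply the scheme field of the already-parsed base URL. A sketch with hypothetical addresses:

from urllib.parse import urlparse as parse_url

base_url = parse_url('https://example-push-server.com')

parsed_custom_address = parse_url('//balancer.example.com')
print(parsed_custom_address.netloc)  # 'balancer.example.com'
print(parsed_custom_address.path)    # ''

control_url = parsed_custom_address._replace(scheme=base_url[0])
print(control_url.geturl())          # 'https://balancer.example.com'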
Example 8: _is_url
def _is_url(url):
    """Check to see if a URL has a valid protocol.

    Parameters
    ----------
    url : str or unicode

    Returns
    -------
    isurl : bool
        If `url` has a valid protocol return True otherwise False.
    """
    try:
        return parse_url(url).scheme in _VALID_URLS
    except Exception:
        return False
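_VALID_URLS is not defined in this snippet; one common way to build it (pandas, for example, does roughly this) is from the scheme lists that urllib.parse itself maintains. A self-contained version under that assumption:

from urllib.parse import urlparse as parse_url
from urllib.parse import uses_netloc, uses_params, uses_relative

_VALID_URLS = set(uses_relative + uses_netloc + uses_params)
_VALID_URLS.discard('')  # a bare local path has an empty scheme and is not a URL

def _is_url(url):
    try:
        return parse_url(url).scheme in _VALID_URLS
    except Exception:
        return False

print(_is_url('https://example.com/data.csv'))  # True
print(_is_url('data/local_file.csv'))           # False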
Example 9: _read_json
def _read_json(self, path_or_url, compressed=True, advanced_path=False):
    ''' Load JSON for a path. Allows remote files in addition to local ones. '''
    if parse_url(path_or_url).scheme in ['http', 'https']:
        try:
            req = _urlopen(path_or_url)
            filename_or_buffer = BytesIO(req.read())
        except HTTPError:
            logging.exception("HTTP Error accessing %s" % path_or_url)
            raise
        compressed = False
    else:
        filename_or_buffer = path_or_url

    try:
        if compressed:
            f = bz2.BZ2File(filename_or_buffer)
        else:
            if not isinstance(filename_or_buffer, (BytesIO, StringIO)):
                f = codecs.open(filename_or_buffer, 'r+', encoding="utf-8")
            else:
                f = filename_or_buffer
        rawjson = f.readline()
        f.close()
    except IOError:
        logging.exception("Can't read %s. Did you pass the incorrect "
                          "'compressed=' argument?", path_or_url)
        raise
    except Exception:
        print(compressed, type(filename_or_buffer))
        logging.exception("Can't open %s", path_or_url)
        raise

    # This is a bandaid for schema version 2.0, not over-engineered
    # since upcoming releases of the extracted features
    # dataset won't keep the basic/advanced split
    try:
        # For Python 3 compatibility, decode to a str object
        if PY3 and not isinstance(rawjson, str):
            rawjson = rawjson.decode()
        volumejson = json.loads(rawjson)
    except Exception:
        logging.exception("Problem reading JSON for %s. One common reason"
                          " for this error is an incorrect compressed= "
                          "argument", path_or_url)
        raise

    return volumejson
Example 10: get_filepath_or_buffer
def get_filepath_or_buffer(filepath_or_buffer, encoding=None):
    """
    If the filepath_or_buffer is a url, translate and return the buffer;
    otherwise pass it through unchanged.

    Parameters
    ----------
    filepath_or_buffer : a url, filepath, or buffer
    encoding : the encoding to use to decode py3 bytes, default is 'utf-8'

    Returns
    -------
    a filepath_or_buffer, the encoding
    """
    if _is_url(filepath_or_buffer):
        req = _urlopen(str(filepath_or_buffer))
        if compat.PY3:  # pragma: no cover
            if encoding:
                errors = 'strict'
            else:
                errors = 'replace'
                encoding = 'utf-8'
            out = StringIO(req.read().decode(encoding, errors))
        else:
            encoding = None
            out = req
        return out, encoding

    if _is_s3_url(filepath_or_buffer):
        try:
            import boto
        except ImportError:
            raise ImportError("boto is required to handle s3 files")
        # Assuming AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
        # are environment variables
        parsed_url = parse_url(filepath_or_buffer)
        conn = boto.connect_s3()
        b = conn.get_bucket(parsed_url.netloc)
        k = boto.s3.key.Key(b)
        k.key = parsed_url.path
        filepath_or_buffer = StringIO(k.get_contents_as_string())
        return filepath_or_buffer, None

    return filepath_or_buffer, None
Example 11: is_gcs_url
def is_gcs_url(url):
    """Check for a gcs url"""
    try:
        return parse_url(url).scheme in ['gcs', 'gs']
    except Exception:
        return False
Example 12: is_s3_url
def is_s3_url(url):
    """Check for an s3, s3n, or s3a url"""
    try:
        return parse_url(url).scheme in ['s3', 's3n', 's3a']
    except Exception:
        return False
Example 13: _is_s3_url
def _is_s3_url(url):
    """Check for an s3 url"""
    try:
        return parse_url(url).scheme == 's3'
    except Exception:
        return False
Example 14: _strip_schema
def _strip_schema(url):
    """Returns the url without the s3:// part"""
    result = parse_url(url)
    return result.netloc + result.path
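For a hypothetical s3://my-bucket/some/key.parquet, concatenating netloc and path yields the bucket-plus-key string:

from urllib.parse import urlparse as parse_url

result = parse_url('s3://my-bucket/some/key.parquet')
print(result.netloc + result.path)  # 'my-bucket/some/key.parquet'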
Example 15: _is_url
def _is_url(url):
    """Check to see if *url* has a valid protocol."""
    try:
        return parse_url(url).scheme in _VALID_URLS
    except Exception:
        return False