本文整理汇总了Python中werkzeug.urls.url_unquote函数的典型用法代码示例。如果您正苦于以下问题:Python url_unquote函数的具体用法?Python url_unquote怎么用?Python url_unquote使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了url_unquote函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: simple
def simple(request, graph, startNode, endNode):
    """Respond with every simple shortest path between two nodes.

    Node names arrive URL-quoted in the path; responds 404 when either
    endpoint is missing from the graph.
    """
    source = url_unquote(startNode)
    target = url_unquote(endNode)
    if not graph.has_node(source) or not graph.has_node(target):
        return request.respondJson({'message': 'node not in graph'},
                                   NOT_FOUND)
    # Materialize the generator so it can be JSON-serialized.
    paths = nx.shortest_simple_paths(graph, source, target)
    request.respondJson({'paths': tuple(paths)})
示例2: deleteEdge
def deleteEdge(request, graph, startNode, endNode):
    """Delete the edge between two URL-quoted node names.

    Responds 404 when the edge does not exist.
    """
    a = url_unquote(startNode)
    b = url_unquote(endNode)
    try:
        # Existence probe only: a KeyError means the edge is absent.
        graph.edge[a][b]
    except KeyError:
        return request.respondJson({'message': 'edge not in graph'},
                                   NOT_FOUND)
    graph.remove_edge(a, b)
    request.respondJson({'message': 'edge deleted'})
示例3: showEdge
def showEdge(request, graph, startNode, endNode):
    """Respond with the attributes of one edge plus its endpoints.

    Responds 404 when either node is missing or the edge does not exist.
    """
    a = url_unquote(startNode)
    b = url_unquote(endNode)
    try:
        attributes = graph.edge[a][b]
    except KeyError:
        return request.respondJson(
            {'message': 'nodes not in graph or not linked'},
            NOT_FOUND)
    # Copy before augmenting so the graph's own attribute dict is untouched.
    payload = dict(attributes)
    payload['start'] = a
    payload['end'] = b
    payload['graph'] = graph.name
    request.respondJson(payload)
示例4: unquote_url_values
def unquote_url_values(endpoint, values):
    """Preprocessor that URL-decodes string values captured from the URL.

    Mutates *values* in place; non-string values are left untouched.
    (Uses ``basestring``, so this snippet is Python 2 code.)
    """
    for name in values:
        candidate = values[name]
        if isinstance(candidate, basestring):
            values[name] = url_unquote(candidate)
示例5: index
def index():
    """Serve a .torrent file built from a ``?magnet=`` query, else the landing page.

    The magnet URI is URL-decoded, its ``xt`` (exact topic) parameter is
    used as the cache key, and on a cache miss libtorrent downloads the
    metadata so the torrent file can be generated and cached.
    """
    # ``in`` works on both Python 2 and 3; dict.has_key was removed in Py3.
    if "magnet" in request.args:
        magnet = url_unquote(request.args["magnet"]).encode(request.charset)
        # Parse only the query part of the magnet URI to extract 'xt'.
        magnet_xt = url_decode(magnet[magnet.index("?") + 1 :])["xt"]
        torrent = cache.get(magnet_xt)
        if not torrent:
            try:
                handle = lt.add_magnet_uri(
                    ses,
                    magnet,
                    {"save_path": "./invalid", "paused": False, "auto_managed": False, "duplicate_is_error": False},
                )
                # Busy-wait until libtorrent has fetched the metadata.
                while not handle.has_metadata():
                    time.sleep(0.01)
                handle.pause()
                info = handle.get_torrent_info()
                torrent = create_torrent(info)
                cache.set(magnet_xt, torrent)
                ses.remove_torrent(handle, lt.options_t.delete_files)
            except Exception:
                # Narrowed from a bare ``except:`` (which also swallowed
                # SystemExit/KeyboardInterrupt). A concurrent request may
                # have populated the cache meanwhile, so retry the lookup.
                torrent = cache.get(magnet_xt)
        response = Response(response=torrent[1], mimetype="application/x-bittorrent")
        response.headers.add("Content-Disposition", "attachment", filename=torrent[0])
        return response
    return render_template("index.html")
示例6: createEdge
def createEdge(request, graph, startNode, endNode):
    """Create an edge between two existing nodes and respond with it.

    Edge attributes come from the request's JSON body; a 'created'
    UTC timestamp is stamped on. Responds 404 when either node is missing.
    """
    a = url_unquote(startNode)
    b = url_unquote(endNode)
    if not graph.has_node(a) or not graph.has_node(b):
        return request.respondJson({'message': 'node not in graph'},
                                   NOT_FOUND)
    attrib = request.json()
    attrib['created'] = datetime.utcnow().strftime(DATETIME_FORMAT)
    graph.add_edge(a, b, **attrib)
    payload = graph.edge[a][b]
    payload.update({'start': a,
                    'end': b,
                    'graph': graph.name})
    request.respondJson(payload, CREATED)
示例7: showNode
def showNode(request, graph, nodeName):
    """Respond with the JSON representation of a single node.

    Responds 404 when the node does not exist in the graph.
    """
    nodeName = url_unquote(nodeName)
    try:
        data = _reprNode(graph, nodeName)
    except KeyError as err:
        # 'as' syntax works on Python 2.6+ and 3 (the comma form is
        # Py2-only); err.args[0] replaces the Py2-only err.message
        # attribute and carries the same value for single-arg exceptions.
        return request.respondJson({'message': err.args[0]},
                                   NOT_FOUND)
    # The original computed ``data`` but never sent it; respond on the
    # success path so this handler matches its sibling show* handlers.
    request.respondJson(data)
示例8: test_quoting
def test_quoting(self):
    """Round-trip and encoding checks for the werkzeug url helpers."""
    quoted = urls.url_quote(u'\xf6\xe4\xfc')
    assert quoted == '%C3%B6%C3%A4%C3%BC'
    # quote followed by unquote must be the identity.
    assert urls.url_unquote(urls.url_quote(u'#%="\xf6')) == u'#%="\xf6'
    plus = urls.url_quote_plus('foo bar')
    assert plus == 'foo+bar'
    assert urls.url_unquote_plus('foo+bar') == 'foo bar'
    # None values are dropped entirely by url_encode.
    assert urls.url_encode({'a': None, 'b': 'foo bar'}) == 'b=foo+bar'
    fixed = urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)')
    assert fixed == 'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
示例9: test_quoting
def test_quoting(self):
    """Strict (type-and-value) checks for the url quoting helpers."""
    eq = self.assert_strict_equal
    eq(urls.url_quote(u'\xf6\xe4\xfc'), '%C3%B6%C3%A4%C3%BC')
    # quote followed by unquote must be the identity.
    eq(urls.url_unquote(urls.url_quote(u'#%="\xf6')), u'#%="\xf6')
    eq(urls.url_quote_plus('foo bar'), 'foo+bar')
    eq(urls.url_unquote_plus('foo+bar'), u'foo bar')
    # url_encode drops None values; bytes and text keys behave alike.
    eq(urls.url_encode({b'a': None, b'b': b'foo bar'}), 'b=foo+bar')
    eq(urls.url_encode({u'a': None, u'b': u'foo bar'}), 'b=foo+bar')
    eq(urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)'),
       'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)')
示例10: make_environ
def make_environ(self):
    """Build a PEP 3333 WSGI environ dict for the current HTTP request.

    Reads the request line/headers parsed by the base HTTP handler
    (self.path, self.command, self.headers, ...) and returns a fresh
    environ mapping for each request.
    """
    request_url = url_parse(self.path)

    def shutdown_server():
        # Flag checked by the serving loop; setting it stops the server.
        self.server.shutdown_signal = True

    url_scheme = 'http' if self.server.ssl_context is None else 'https'
    # Normalize client_address to an (address, port) tuple; some
    # transports (e.g. unix sockets) report a bare string or nothing.
    if not self.client_address:
        self.client_address = '<local>'
    if isinstance(self.client_address, str):
        self.client_address = (self.client_address, 0)
    else:
        pass
    # Percent-decode the raw path before re-encoding it for the environ.
    path_info = url_unquote(request_url.path)
    environ = {
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': url_scheme,
        'wsgi.input': self.rfile,
        'wsgi.errors': sys.stderr,
        'wsgi.multithread': self.server.multithread,
        'wsgi.multiprocess': self.server.multiprocess,
        'wsgi.run_once': False,
        'werkzeug.server.shutdown': shutdown_server,
        'SERVER_SOFTWARE': self.server_version,
        'REQUEST_METHOD': self.command,
        'SCRIPT_NAME': '',
        'PATH_INFO': wsgi_encoding_dance(path_info),
        'QUERY_STRING': wsgi_encoding_dance(request_url.query),
        # Non-standard, added by mod_wsgi, uWSGI
        "REQUEST_URI": wsgi_encoding_dance(self.path),
        # Non-standard, added by gunicorn
        "RAW_URI": wsgi_encoding_dance(self.path),
        'REMOTE_ADDR': self.address_string(),
        'REMOTE_PORT': self.port_integer(),
        'SERVER_NAME': self.server.server_address[0],
        'SERVER_PORT': str(self.server.server_address[1]),
        'SERVER_PROTOCOL': self.request_version
    }
    # Fold HTTP headers into the environ as HTTP_* keys; repeated
    # headers are joined with commas per the CGI convention.
    for key, value in self.get_header_items():
        key = key.upper().replace('-', '_')
        if key not in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
            key = 'HTTP_' + key
        if key in environ:
            value = "{},{}".format(environ[key], value)
        environ[key] = value
    # Chunked bodies get a de-chunking wrapper around the input stream.
    if environ.get('HTTP_TRANSFER_ENCODING', '').strip().lower() == 'chunked':
        environ['wsgi.input_terminated'] = True
        environ['wsgi.input'] = DechunkedInput(environ['wsgi.input'])
    # An absolute request URI (proxy-style) carries the host itself.
    if request_url.scheme and request_url.netloc:
        environ['HTTP_HOST'] = request_url.netloc
    return environ
示例11: test_quoting
def test_quoting(self):
    """Table-driven checks for quoting, plus-quoting and url_fix."""
    cases = [
        (urls.url_quote(u"\xf6\xe4\xfc"), "%C3%B6%C3%A4%C3%BC"),
        # quote followed by unquote must be the identity.
        (urls.url_unquote(urls.url_quote(u'#%="\xf6')), u'#%="\xf6'),
        (urls.url_quote_plus("foo bar"), "foo+bar"),
        (urls.url_unquote_plus("foo+bar"), "foo bar"),
        # url_encode drops None values.
        (urls.url_encode({"a": None, "b": "foo bar"}), "b=foo+bar"),
        (urls.url_fix(u"http://de.wikipedia.org/wiki/Elf (Begriffsklärung)"),
         "http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29"),
    ]
    for actual, expected in cases:
        assert actual == expected
示例12: delete
def delete(self):
    """Delete the named playlist and broadcast the updated registry."""
    from txplaya.playlistregistry import playlistRegistry
    name = url_unquote(self.playlistNameArg)
    playlistRegistry.deletePlaylist(name)
    # Tell all connected clients the registry changed.
    self.mainController.announce({
        'event': 'PlaylistRegistryUpdated',
        'data': {'list': playlistRegistry.list_()},
    })
    return {'msg': 'Playlist deleted'}
示例13: worker
def worker(domain, save_rules):
    """Worker process, fetches url from the front,
    crawl, push new urls to front
    and pushes content to sink if matched on a rule"""
    ctx = zmq.Context()
    # REQ socket: ask the URL frontier (port 5050) for the next URL.
    worker_ = ctx.socket(zmq.REQ)
    worker_.connect('tcp://localhost:5050')
    # PUSH socket: hand matched page payloads to the saver (port 5051).
    saver = ctx.socket(zmq.PUSH)
    saver.connect('tcp://localhost:5051')
    # PUSH socket: feed newly discovered URLs back to the frontier (5052).
    urlsink = ctx.socket(zmq.PUSH)
    urlsink.connect('tcp://localhost:5052')
    # Werkzeug routing Map used purely as a URL pattern matcher for the
    # save rules; match() raises NotFound for non-matching URLs.
    matcher = Map(map(Rule, save_rules)).bind('', '/').match
    while True:
        # Empty request = "give me the next URL to crawl".
        worker_.send('')
        url = worker_.recv().decode('utf-8')
        try:
            # NOTE(review): 'rq' is presumably the requests library —
            # confirm against the module's imports.
            q = rq.get(u'http://%s%s' % (domain, url_unquote(url)),
                       allow_redirects = False)
        except ConnectionError:
            # Best effort: skip unreachable URLs and keep crawling.
            continue
        if q.status_code == 301 or q.status_code == 302:
            redirect = q.headers['location']
            if domain in redirect:
                # only sent to front
                urlsink.send(redirect.split(domain)[1].encode('utf-8'))
            continue
        html = q.content
        try: _, data = matcher(url)
        except NotFound: pass
        else:
            # needs to be saved, sends html, url, data to saver
            data = zlib.compress(json.dumps([html, url, data]))
            saver.send(data)
            del data
        # Collect same-domain links (path part only) for the frontier.
        fetched = set()
        for link in fromstring(html).cssselect("a[href]"):
            # Strip fragments; skip file:// and javascript: pseudo-links.
            link = link.attrib['href'].split('#')[0]
            if link.startswith('file://') or link.startswith('javascript:'): continue
            if not link.startswith('http'):
                fetched.add(link)
            elif domain in link:
                fetched.add(link.split(domain)[1])
        for l in fetched:
            urlsink.send(l.encode('utf-8'))
示例14: wrapped
def wrapped(request, **kwargs):
    """Resolve the URL-quoted graph-name kwarg into a graph object.

    Replaces kwargs[argName] (a name) with the graph itself before
    delegating to the wrapped handler; responds 404 on unknown names.
    """
    name = url_unquote(kwargs.pop(argName))
    try:
        graph = request.app.graphs[name]
    except KeyError:
        return request.respondJson(
            {'message': 'graph {0} not found'.format(name)},
            NOT_FOUND)
    # Handlers downstream rely on the graph knowing its own name.
    graph.name = name
    kwargs[argName] = graph
    return fc(request, **kwargs)
示例15: test_quoting
def test_quoting():
    """Table-driven strict checks for quote/unquote, url_encode, url_fix."""
    expectations = (
        (urls.url_quote(u'\xf6\xe4\xfc'), '%C3%B6%C3%A4%C3%BC'),
        # quote followed by unquote must be the identity.
        (urls.url_unquote(urls.url_quote(u'#%="\xf6')), u'#%="\xf6'),
        (urls.url_quote_plus('foo bar'), 'foo+bar'),
        (urls.url_unquote_plus('foo+bar'), u'foo bar'),
        # A literal '+' survives a plus-quoting round trip as %2B.
        (urls.url_quote_plus('foo+bar'), 'foo%2Bbar'),
        (urls.url_unquote_plus('foo%2Bbar'), u'foo+bar'),
        # url_encode drops None values; bytes and text keys behave alike.
        (urls.url_encode({b'a': None, b'b': b'foo bar'}), 'b=foo+bar'),
        (urls.url_encode({u'a': None, u'b': u'foo bar'}), 'b=foo+bar'),
        (urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)'),
         'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'),
        # Non-string and bytes inputs are coerced/percent-encoded.
        (urls.url_quote_plus(42), '42'),
        (urls.url_quote(b'\xff'), '%FF'),
    )
    for actual, expected in expectations:
        strict_eq(actual, expected)