This page collects typical usage examples of the Python method http_parser.parser.HttpParser.get_status_code. If you are wondering what HttpParser.get_status_code does and how it is used in real code, the curated examples below should help. You can also explore further usage examples of the containing class, http_parser.parser.HttpParser.
The following presents 14 code examples of the HttpParser.get_status_code method, sorted by popularity by default.
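Before the examples, here is a minimal sketch of the typical call sequence (not taken from the examples below; the canned response bytes are made up for illustration): feed raw bytes to HttpParser.execute(), then read the result with get_status_code() once is_message_complete() reports that the whole response has been parsed.

from http_parser.parser import HttpParser

# A canned HTTP response, standing in for data read from a socket.
raw = b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 2\r\n\r\nok"

parser = HttpParser()
nparsed = parser.execute(raw, len(raw))  # feed bytes; returns the number of bytes consumed
assert nparsed == len(raw)

if parser.is_message_complete():
    print(parser.get_status_code())  # 200
    print(parser.get_headers())      # parsed response headers
    print(parser.recv_body())        # b'ok' (body received so far)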
Example 1: post_sync
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def post_sync(sock, masterip, masterport):
    obj = {"last_fileno": haystack.haystack_last_fileno}
    body = json.dumps(obj)
    sock.send("POST /sync HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (masterip, masterport))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)
    parser = HttpParser()
    while True:
        # !!! ugly: recv one byte at a time so we do not read into the next HTTP response
        data = sock.recv(1)
        if not data:
            return False
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break
    return parser.get_status_code() == 200
Example 2: post_report
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def post_report(sock):
    st = os.statvfs(haystack_path)
    available_size = st.f_bavail * st.f_frsize
    obj = {}
    obj["listenip"] = listenip
    obj["listenport"] = listenport
    obj["disk_available_size"] = available_size
    obj["master"] = master
    obj["groupid"] = groupid
    obj["last_fileno"] = haystack.haystack_last_fileno
    body = json.dumps(obj)
    sock.send("POST /report HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (track.ip, track.port))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)
    parser = HttpParser()
    while True:
        data = sock.recv(1024)
        if not data:
            return False
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break
    return parser.get_status_code() == 200
Example 3: Response_Parser
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
class Response_Parser():

    def __init__(self):
        self.parser = HttpParser()
        self.len_response = 0
        self.len_body = 0
        self.body = None

    def parse(self, raw_response):
        self.len_response = len(bytearray(raw_response))
        self.parser.execute(raw_response, self.len_response)
        self.body = self.parser.recv_body()
        self.len_body = len(bytearray(self.body))

    def get_all_keys(self):
        """Get all the keys in the response headers."""
        return self.parser.get_headers().keys()

    def get_keys(self, *args):
        header_keys = {}
        for key in args:
            header_keys[key] = self.parser.get_headers().get(key, '-')
        return header_keys

    def get_reponse(self, *args):
        values = self.get_keys(*args)
        status_code = self.parser.get_status_code()
        obj = HTTP_Response(status_code, values, self.len_response, self.len_body)
        return obj

    def get_body(self):
        return self.body
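A possible way to drive Response_Parser (a hypothetical usage sketch, not from the original source; it assumes the external HTTP_Response class referenced by get_reponse, and the raw bytes would normally come from a socket):

# Hypothetical usage; `raw` stands in for a response read from a socket.
raw = b"HTTP/1.1 404 Not Found\r\nContent-Length: 9\r\n\r\nnot found"
rp = Response_Parser()
rp.parse(raw)
print(rp.parser.get_status_code())              # 404
print(rp.get_keys('Content-Length', 'Server'))  # missing headers default to '-'
print(rp.get_body())                            # b'not found'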
Example 4: __init__
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def __init__(self, raw):
    resp = HttpParser()
    resp.execute(raw.response, len(raw.response))
    self.headers = resp.get_headers()
    self.body = "".join(resp._body)
    self.raw = raw
    self.code = resp.get_status_code()
    self._json = None
Example 5: request
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def request(self, method, url, headers={}, body=None, timeout=None):
    '''Issues a `method` request to `path` on the
    connected server. Sends along `headers`, and
    body.

    Very low level--you must set "host" yourself,
    for example. It will set Content-Length,
    however.
    '''
    url_info = urlparse(url)
    fake_wsgi = dict(
        (cgi_name(n), v) for n, v in headers.iteritems())
    fake_wsgi.update({
        'HTTP_METHOD': method,
        'SCRIPT_NAME': '',
        'PATH_INFO': url_info[2],
        'QUERY_STRING': url_info[4],
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',  # XXX incomplete
        'wsgi.input': cStringIO.StringIO(body or ''),
        'wsgi.errors': FileLikeErrorLogger(hlog),
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
    })
    req = Request(fake_wsgi)
    timeout_handler = TimeoutHandler(timeout or 60)
    send('%s %s HTTP/1.1\r\n%s' % (req.method, req.url, str(req.headers)))
    if body:
        send(body)
    h = HttpParser()
    body = []
    data = None
    while True:
        if data:
            used = h.execute(data, len(data))
            if h.is_headers_complete():
                body.append(h.recv_body())
            if h.is_message_complete():
                data = data[used:]
                break
        ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
        if ev == 'sleep':
            timeout_handler.timeout()
        data = val
    resp = Response(
        response=''.join(body),
        status=h.get_status_code(),
        headers=h.get_headers(),
    )
    return resp
Example 6: heartbeat
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def heartbeat(sock):
    ip, port = sock.getpeername()
    parser = HttpParser()
    sock.send("GET /ping HTTP/1.1\r\nHost: %s:%d\r\n\r\n" % (ip, port))
    while True:
        data = sock.recv(1024)
        if not data:
            return False
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break
    return parser.get_status_code() == 200
Example 7: makeRequest
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def makeRequest(self, host, url="/", port=80, method='GET', headers=None, postdata=None):
    assert self.e is not None
    evSet = self.e.wait()  # noqa: F841
    # log.debug("Generating raw http request")
    self.s.connect((host, port))
    if headers is None:
        headers = {
            "Accept": "*/*",
            "User-Agent": self.useragent
        }
    req = self.rawHttpReq(host, url, method, headers, postdata)
    self.s.sendall(req.encode())
    h = []
    body = []
    p = HttpParser()
    tlen = 0
    while True:
        data = self.s.recv(2048)
        if not data:
            break
        rlen = len(data)
        tlen += rlen
        nparsed = p.execute(data, rlen)
        assert nparsed == rlen
        if p.is_headers_complete():
            h = p.get_headers()
            # log.debug(p.get_headers())
        if p.is_partial_body():
            body.append(p.recv_body())
        if p.is_message_complete():
            break
    self.s.close()
    res = {'status': p.get_status_code(), 'length': tlen, 'headers': h, 'body': body, 'request': req}
    print(res)
Example 8: receive
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def receive(self):
    h = HttpParser()
    body = []
    data = None
    while True:
        if data:
            used = h.execute(data, len(data))
            if h.is_headers_complete():
                body.append(h.recv_body())
            if h.is_message_complete():
                data = data[used:]
                break
        data = self.s.recv(BUFSIZE)
    return Response(response=''.join(body),
                    status=h.get_status_code(),
                    headers=h.get_headers(),
                    )
Example 9: __init__
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
#......... part of the code omitted here .........
def _prepare_host(self, host):
    host = encodings.idna.nameprep(host)
    return b'.'.join(encodings.idna.ToASCII(x) for x in host.split('.')).decode('ascii')

def on_data(self, data, close=False, addr=None):
    if close:
        logger.debug('%s: connection to %s closed.', self.origurl, addr)
    if (close and self._redirected_stream is self.stream) or self._finished:
        # The connection is closing, and we are being redirected or we're done.
        self._redirected_stream = None
        return
    recved = len(data)
    logger.debug('%s: received data: %d bytes', self.origurl, recved)
    p = self.parser
    nparsed = p.execute(data, recved)
    if close:
        # feed EOF
        p.execute(b'', 0)
    if not self.headers_done and p.is_headers_complete():
        if not self.on_headers_done():
            return
    if p.is_partial_body():
        chunk = p.recv_body()
        if self.finder is None:
            # redirected but has body received
            return
        t = self.feed_finder(chunk)
        if t is not None:
            self.run_callback(t)
            return
    if p.is_message_complete():
        if self.finder is None:
            # redirected but has body received
            return
        t = self.feed_finder(None)
        # if title not found, t is None
        self.run_callback(t)
    elif close:
        self.run_callback(self.stream.error or ConnectionClosed)

def before_connected(self):
    '''check if something wrong before connected'''
    if not self._connected and not self._finished:
        self.run_callback(self.stream.error)

def process_cookie(self):
    setcookie = self.headers.get('Set-Cookie', None)
    if not setcookie:
        return
    cookies = [c.rsplit(None, 1)[-1] for c in setcookie.split('; expires')[:-1]]
    self._cookie = 'Cookie: ' + '; '.join(cookies)

def on_headers_done(self):
    '''returns True if should proceed, None if should stop for current chunk'''
    self.headers_done = True
    self.headers = self.parser.get_headers()
    self.status_code = self.parser.get_status_code()
    if self.status_code in (301, 302):
        self.process_cookie()  # or we may be redirecting to a loop
        logger.debug('%s: redirect to %s', self.origurl, self.headers['Location'])
        self.followed_times += 1
        if self.followed_times > self.max_follows:
            self.run_callback(TooManyRedirection)
        else:
            newurl = urljoin(self.fullurl, self.headers['Location'])
            self._redirected_stream = self.stream
            self.new_url(newurl)
        return
    try:
        l = int(self.headers.get('Content-Length', None))
    except (ValueError, TypeError):
        l = None
    ctype = self.headers.get('Content-Type', 'text/html')
    mt = defaultMediaType._replace(type=ctype, size=l)
    for finder in self._content_finders:
        f = finder.match_type(mt)
        if f:
            self.finder = f
            break
    else:
        self.run_callback(mt)
        return
    return True

def feed_finder(self, chunk):
    '''feed data to TitleFinder, return the title if found'''
    t = self.finder(chunk)
    if t is not None:
        return t
Example 10: HttpStream
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
class HttpStream(object):
    """ An HTTP parser providing higher-level access to a readable,
    sequential io.RawIOBase object. You can use implementations of
    http_parser.reader (IterReader, StringReader, SocketReader) or
    create your own.
    """

    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream.

        :attr stream: an io.RawIOBase object
        :attr kind: Int, could be 0 to parse only requests,
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream

    def _check_headers_complete(self):
        if self.parser.is_headers_complete():
            return
        while True:
            try:
                next(self)
            except StopIteration:
                if self.parser.is_headers_complete():
                    return
                raise NoMoreData("Can't parse headers")
            if self.parser.is_headers_complete():
                return

    def _wait_status_line(self, cond):
        if self.parser.is_headers_complete():
            return True
        data = []
        if not cond():
            while True:
                try:
                    d = next(self)
                    data.append(d)
                except StopIteration:
                    if self.parser.is_headers_complete():
                        return True
                    raise BadStatusLine(b"".join(data))
                if cond():
                    return True
        return True

    def _wait_on_url(self):
        return self._wait_status_line(self.parser.get_url)

    def _wait_on_status(self):
        return self._wait_status_line(self.parser.get_status_code)

    def url(self):
        """ get full url of the request """
        self._wait_on_url()
        return self.parser.get_url()

    def path(self):
        """ get path of the request (url without query string and
        fragment) """
        self._wait_on_url()
        return self.parser.get_path()

    def query_string(self):
        """ get query string of the url """
        self._wait_on_url()
        return self.parser.get_query_string()

    def fragment(self):
        """ get fragment of the url """
        self._wait_on_url()
        return self.parser.get_fragment()

    def version(self):
        self._wait_on_status()
        return self.parser.get_version()

    def status_code(self):
        """ get status code of a response as integer """
        self._wait_on_status()
        return self.parser.get_status_code()

    def status(self):
        """ return complete status with reason """
        status_code = self.status_code()
        reason = status_reasons.get(int(status_code), 'unknown')
        return "%s %s" % (status_code, reason)

    def method(self):
        """ get HTTP method as string """
        self._wait_on_status()
        return self.parser.get_method()

#......... the rest of the code is omitted here .........
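A short, hedged usage sketch for a class like this: it assumes the complete HttpStream from http_parser.http (the iteration protocol used by next() is in the omitted part above) and the IterReader helper from http_parser.reader, fed with a single canned chunk of bytes.

from http_parser.http import HttpStream
from http_parser.reader import IterReader

raw = b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 2\r\n\r\nok"
stream = HttpStream(IterReader([raw]))  # one bytes chunk stands in for socket reads

print(stream.status_code())  # 200, via parser.get_status_code()
print(stream.status())       # "200 OK", status code plus reason phrase
print(stream.headers())      # parsed headers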
Example 11: request
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
def request(self, method, url, headers=None, body=None, timeout=None):
    """Issues a `method` request to `path` on the
    connected server. Sends along `headers`, and
    body.

    Very low level--you must set "host" yourself,
    for example. It will set Content-Length,
    however.
    """
    headers = headers or {}
    url_info = urlparse(url)
    fake_wsgi = dict((cgi_name(n), str(v).strip()) for n, v in headers.iteritems())
    if body and "CONTENT_LENGTH" not in fake_wsgi:
        # If the caller hasn't set their own Content-Length but submitted
        # a body, we auto-set the Content-Length header here.
        fake_wsgi["CONTENT_LENGTH"] = str(len(body))
    fake_wsgi.update(
        {
            "REQUEST_METHOD": method,
            "SCRIPT_NAME": "",
            "PATH_INFO": url_info[2],
            "QUERY_STRING": url_info[4],
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": "http",  # XXX incomplete
            "wsgi.input": cStringIO.StringIO(body or ""),
            "wsgi.errors": FileLikeErrorLogger(hlog),
            "wsgi.multithread": False,
            "wsgi.multiprocess": False,
            "wsgi.run_once": False,
        }
    )
    req = Request(fake_wsgi)
    timeout_handler = TimeoutHandler(timeout or 60)
    url = str(req.path)
    if req.query_string:
        url += "?" + str(req.query_string)
    send("%s %s HTTP/1.1\r\n%s" % (req.method, url, str(req.headers)))
    if body:
        send(body)
    h = HttpParser()
    body = []
    data = None
    while True:
        if data:
            used = h.execute(data, len(data))
            if h.is_headers_complete():
                body.append(h.recv_body())
            if h.is_message_complete():
                data = data[used:]
                break
        ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
        if ev == "sleep":
            timeout_handler.timeout()
        data = val
    resp = Response(response="".join(body), status=h.get_status_code(), headers=h.get_headers())
    return resp
Example 12: __init__
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
#......... part of the code omitted here .........
logger.debug('%s: received data: %d bytes', self.origurl, recved)
p = self.parser
nparsed = p.execute(data, recved)
if close:
    # feed EOF
    p.execute(b'', 0)
if not self.headers_done and p.is_headers_complete():
    if not self.on_headers_done():
        return
if p.is_partial_body():
    chunk = p.recv_body()
    if not self.charset:
        m = self.meta_charset.search(chunk)
        if m:
            self.charset = (m.group(1) or m.group(2)).decode('latin1')
    t = self.feed_finder(chunk)
    if t:
        self.run_callback(t)
        return
if p.is_message_complete():
    t = self.feed_finder(None)
    # if title not found, t is None
    self.run_callback(t)
elif close:
    self.run_callback(self.stream.error or ConnectionClosed)

def before_connected(self):
    '''check if something wrong before connected'''
    if not self._connected and not self._finished:
        self.run_callback(self.stream.error)

def process_cookie(self):
    setcookie = self.headers.get('Set-Cookie', None)
    if not setcookie:
        return
    cookies = [c.rsplit(None, 1)[-1] for c in setcookie.split('; expires')[:-1]]
    self._cookie = 'Cookie: ' + '; '.join(cookies)

def on_headers_done(self):
    '''returns True if should proceed, None if should stop for current chunk'''
    self.headers_done = True
    self.headers = self.parser.get_headers()
    self.status_code = self.parser.get_status_code()
    if self.status_code in (301, 302):
        self.process_cookie()  # or we may be redirecting to a loop
        logger.debug('%s: redirect to %s', self.origurl, self.headers['Location'])
        self.followed_times += 1
        if self.followed_times > self.max_follows:
            self.run_callback(TooManyRedirection)
        else:
            newurl = urljoin(self.fullurl, self.headers['Location'])
            self._redirecting = True
            self.new_url(newurl)
        return
    ctype = self.headers.get('Content-Type', 'text/html')
    if ctype.find('html') == -1:
        try:
            l = int(self.headers.get('Content-Length', None))
        except (ValueError, TypeError):
            l = None
        mt = defaultMediaType._replace(type=ctype, size=l)
        ctype = ctype.split(';', 1)[0]
        if ctype == 'image/png':
            self.finder = PNGFinder(mt)
        elif ctype == 'image/jpeg':
            self.finder = JPEGFinder(mt)
        elif ctype == 'image/gif':
            self.finder = GIFFinder(mt)
        else:
            self.run_callback(mt)
            return
    else:
        self.finder = TitleFinder()
        pos = ctype.find('charset=')
        if pos > 0:
            self.charset = ctype[pos+8:]
    return True

def feed_finder(self, chunk):
    '''feed data to TitleFinder, return the title if found'''
    t = self.finder(chunk)
    if t:
        if self.charset is None:
            self.charset = self.default_charset
        if isinstance(t, bytes):
            try:
                title = replaceEntities(t.decode(self.charset))
                return title
            except (UnicodeDecodeError, LookupError):
                return t
        else:
            return t
Example 13: HttpProxyProtocol
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
#......... part of the code omitted here .........
        token_secret=token.secret,
        query_params=parse_qs(parsed.query),
        body_params=parse_qs(body)
    )
else:
    raise ValueError('No signing algorithm known for URL: {}'
                     .format(url))

if self._mitm is None:
    url = urlparse(uri)
    host = url.hostname
    port = url.port
    if port is None:
        port = 80 if url.scheme == 'http' else 443
    log.debug('Connecting to upstream (plaintext).')
    upstream = yield from asyncio.open_connection(host, port)
    upstream_reader, upstream_writer = upstream
    request = render_http_request(method, uri, version, headers, body)
    upstream_writer.write(request)
    response = b''
    parser = HttpParser()
    while True:
        if not parser.is_headers_complete():
            data = yield from upstream_reader.readline()
        else:
            data = yield from upstream_reader.read(
                int(parser.get_headers()['Content-Length'])
            )
        log.debug('Received plaintext from upstream: {}'.format(data))
        parser.execute(data, len(data))
        if parser.is_partial_body():
            body += parser.recv_body()
        if parser.is_message_complete():
            version = self._parser.get_version()
            status = self._parser.get_status_code()
            reason = None  # For some reason, the parser doesn't expose this :(
            headers = self._parser.get_headers()
            if status == 200:
                self._token_store.update_rate_limit(url, headers)
            log.debug('Plaintext upstream status: {}'.format(status))
            log.debug('Plaintext upstream headers: {}'.format(headers))
            log.debug('Plaintext upstream body: {}...'.format(body[:1000]))
            response = render_http_response(
                version, status, reason, headers, body
            )
            break
    upstream_writer.close()
else:
    upstream_write = self._mitm.forward
    request = render_http_request(method, uri, version, headers, body)
    upstream_write(request)
    response = yield from self._mitm.receive()
    version, status, reason, headers, body = response
    if status == 200:
        self._token_store.update_rate_limit(token, url, headers)
    response = render_http_response(
        version, status, reason, headers, body
    )

# Forward the upstream response to the client.
self._transport.write(response)
self._transport.close()

def _set_header(self, key, value):
    ''' Set a header value. '''
    key = key.strip().upper()
    value = value.strip()
    self._headers[key] = value

@asyncio.coroutine
def _start_mitm(self, uri, version):
    ''' MITM a connection to the upstream server. '''
    log.debug('The proxy is starting an MITM connection.')
    host, port = uri.split(':')
    port = int(port)
    self._mitm_host = host
    _, self._mitm = yield from self._loop.create_connection(
        lambda: MitmProtocol(self._loop, version, self),
        host,
        port,
        ssl=ssl.create_default_context()
    )
Example 14: MitmProtocol
# Required import: from http_parser.parser import HttpParser [as alias]
# Or: from http_parser.parser.HttpParser import get_status_code [as alias]
class MitmProtocol(asyncio.Protocol):
    ''' Handles details of MITMing a TLS connection. '''

    def __init__(self, loop, http_version, proxy):
        ''' Constructor. '''
        self._http_version = http_version
        self._loop = loop
        self._parser = HttpParser()
        self._proxy = proxy
        self._received = asyncio.Future()
        self._body = b''

    def connection_made(self, transport):
        ''' Save a reference to the transport. '''
        log.debug('MITM connection opened.')
        self._transport = transport
        cert = self._transport.get_extra_info('peercert')
        log.debug('MITM upstream certificate: {}'.format(cert))
        self._loop.call_soon(self._proxy.start_tls, self._http_version)

    def connection_lost(self, exc):
        log.debug('MITM connection closed.')
        self._received.cancel()

    def data_received(self, data):
        ''' Accumulate request data. '''
        log.debug('MITM data received: {}'.format(data))
        self._parser.execute(data, len(data))
        if self._parser.is_partial_body():
            self._body += self._parser.recv_body()
        if self._parser.is_message_complete():
            version = self._parser.get_version()
            status = self._parser.get_status_code()
            reason = None  # For some reason, the parser doesn't expose this :(
            headers = self._parser.get_headers()
            log.debug('MITM upstream status: {}'.format(status))
            log.debug('MITM upstream headers: {}'.format(headers))
            log.debug('MITM upstream body: {}...'.format(self._body[:1000]))
            self._received.set_result(
                (version, status, reason, headers, self._body)
            )
            self._transport.close()

    def forward(self, data):
        ''' Forward data to upstream host. '''
        log.debug('MITM sending data: {}'.format(data))
        self._transport.write(data)

    @asyncio.coroutine
    def receive(self):
        ''' Read data received by this MITM instance. '''
        response = yield from self._received
        return response