This article collects typical usage examples of Python's httplib.InvalidURL exception. If you have been wondering what httplib.InvalidURL does, when it is raised, or how it is handled in practice, the selected code examples below should help. You can also look further into the parent module, httplib, for additional usage examples.
Twelve code examples involving httplib.InvalidURL are shown below, ordered by popularity by default.
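Before the examples, a note on when the exception actually occurs: in Python 2's httplib, InvalidURL is a subclass of HTTPException, and HTTPConnection raises it while parsing the host:port part of a URL if the port is not numeric (security-patched 2.7 releases also raise it for URLs containing control characters, as several examples below demonstrate). A minimal sketch of the simplest trigger:

import httplib

try:
    # The port in "host:port" must be numeric; "abc" is rejected while
    # the address is parsed, before any connection attempt is made.
    conn = httplib.HTTPConnection("www.python.org:abc")
except httplib.InvalidURL as exc:
    print "InvalidURL raised: %s" % exc    # e.g. nonnumeric port: 'abc'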
Example 1: test_host_port
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def test_host_port(self):
    # Check invalid host_port
    # Note that httplib does not accept user:password@ in the host-port.
    for hp in ("www.python.org:abc", "user:password@www.python.org"):
        self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp)

    for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b",
                      8000),
                     ("www.python.org:80", "www.python.org", 80),
                     ("www.python.org", "www.python.org", 80),
                     ("www.python.org:", "www.python.org", 80),
                     ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
        http = httplib.HTTP(hp)
        c = http._conn
        if h != c.host:
            self.fail("Host incorrectly parsed: %s != %s" % (h, c.host))
        if p != c.port:
            self.fail("Port incorrectly parsed: %s != %s" % (p, c.port))
Example 2: test_url_path_with_control_char_rejected
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def test_url_path_with_control_char_rejected(self):
    for char_no in range(0, 0x21) + range(0x7f, 0x100):
        char = chr(char_no)
        schemeless_url = "//localhost:7777/test%s/" % char
        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
        try:
            # We explicitly test urllib.request.urlopen() instead of the top
            # level 'def urlopen()' function defined in this... (quite ugly)
            # test suite. They use different url opening codepaths. Plain
            # urlopen uses FancyURLOpener which goes via a codepath that
            # calls urllib.parse.quote() on the URL which makes all of the
            # above attempts at injection within the url _path_ safe.
            escaped_char_repr = repr(char).replace('\\', r'\\')
            InvalidURL = httplib.InvalidURL
            with self.assertRaisesRegexp(
                    InvalidURL, "contain control.*" + escaped_char_repr):
                urllib2.urlopen("http:" + schemeless_url)
            with self.assertRaisesRegexp(
                    InvalidURL, "contain control.*" + escaped_char_repr):
                urllib2.urlopen("https:" + schemeless_url)
        finally:
            self.unfakehttp()
Example 3: test_url_path_with_newline_header_injection_rejected
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def test_url_path_with_newline_header_injection_rejected(self):
    self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
    host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
    schemeless_url = "//" + host + ":8080/test/?test=a"
    try:
        # We explicitly test urllib2.urlopen() instead of the top
        # level 'def urlopen()' function defined in this... (quite ugly)
        # test suite. They use different url opening codepaths. Plain
        # urlopen uses FancyURLOpener which goes via a codepath that
        # calls urllib.parse.quote() on the URL which makes all of the
        # above attempts at injection within the url _path_ safe.
        InvalidURL = httplib.InvalidURL
        with self.assertRaisesRegexp(InvalidURL,
                                     r"contain control.*\\r.*(found at least . .)"):
            urllib2.urlopen("http:{}".format(schemeless_url))
        with self.assertRaisesRegexp(InvalidURL,
                                     r"contain control.*\\n"):
            urllib2.urlopen("https:{}".format(schemeless_url))
    finally:
        self.unfakehttp()
Example 4: test_url_host_with_control_char_rejected
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def test_url_host_with_control_char_rejected(self):
    for char_no in list(range(0, 0x21)) + [0x7f]:
        char = chr(char_no)
        schemeless_url = "//localhost{}/test/".format(char)
        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
        try:
            escaped_char_repr = repr(char).replace('\\', r'\\')
            InvalidURL = httplib.InvalidURL
            with self.assertRaisesRegexp(InvalidURL,
                                         "contain control.*{}".format(escaped_char_repr)):
                urllib2.urlopen("http:{}".format(schemeless_url))
            with self.assertRaisesRegexp(InvalidURL,
                                         "contain control.*{}".format(escaped_char_repr)):
                urllib2.urlopen("https:{}".format(schemeless_url))
        finally:
            self.unfakehttp()
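Examples 2-4 above exercise the urllib2 code path, but the control-character check itself lives in httplib: on Python 2.7 releases that include the CVE-2019-9740/9947 backports (around 2.7.17), HTTPConnection.putrequest() refuses a request target containing control characters by raising InvalidURL, while unpatched interpreters send the bytes through unchecked. A minimal sketch of triggering that check directly; no server is contacted because putrequest() only buffers the request line:

import httplib

conn = httplib.HTTPConnection("localhost")
try:
    # A CR/LF embedded in the request target is an attempted header
    # injection; a patched httplib refuses to put it on the wire.
    conn.putrequest("GET", "/test/?a=1\r\nX-injected: header")
except httplib.InvalidURL as exc:
    print "rejected: %s" % exc
finally:
    conn.close()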
Example 5: crawl
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def crawl(target):
    try:
        visited = set()
        threadData = getCurrentThreadData()
        threadData.shared.value = oset()

        def crawlThread():
            threadData = getCurrentThreadData()

            while kb.threadContinue:
                with kb.locks.limit:
                    if threadData.shared.unprocessed:
                        current = threadData.shared.unprocessed.pop()
                        if current in visited:
                            continue
                        elif conf.crawlExclude and re.search(conf.crawlExclude, current):
                            dbgMsg = "skipping '%s'" % current
                            logger.debug(dbgMsg)
                            continue
                        else:
                            visited.add(current)
                    else:
                        break

                content = None
                try:
                    if current:
                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
                except SqlmapConnectionException, ex:
                    errMsg = "connection exception detected (%s). skipping " % ex
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
                except SqlmapSyntaxException:
                    errMsg = "invalid URL detected. skipping '%s'" % current
                    logger.critical(errMsg)
                except httplib.InvalidURL, ex:
                    errMsg = "invalid URL detected (%s). skipping " % ex
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
Example 6: parseSitemap
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def parseSitemap(url, retVal=None):
    global abortedFlag

    if retVal is not None:
        logger.debug("parsing sitemap '%s'" % url)

    try:
        if retVal is None:
            abortedFlag = False
            retVal = oset()

        try:
            content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
        except httplib.InvalidURL:
            errMsg = "invalid URL given for sitemap ('%s')" % url
            raise SqlmapSyntaxException, errMsg

        for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
            if abortedFlag:
                break
            url = match.group(1).strip()

            if url.endswith(".xml") and "sitemap" in url.lower():
                if kb.followSitemapRecursion is None:
                    message = "sitemap recursion detected. Do you want to follow? [y/N] "
                    test = readInput(message, default="N")
                    kb.followSitemapRecursion = test[0] in ("y", "Y")

                if kb.followSitemapRecursion:
                    parseSitemap(url, retVal)
            else:
                retVal.add(url)

    except KeyboardInterrupt:
        abortedFlag = True
        warnMsg = "user aborted during sitemap parsing. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    return retVal
Example 7: getsource
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def getsource(site):
    if site[:7] != "http://":
        site = "http://" + site
    try:
        site = urllib.urlopen(site).readlines()
        return site
    except(httplib.InvalidURL):
        site = []
        return site
Example 8: tester
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def tester(target):
    if verbose == 1:
        if message != "":
            print "Target:", target.replace("D3HYDR8%2D0wNz%2DY0U", message)
        else:
            print "Target:", target
    try:
        source = urllib2.urlopen("http://" + target).read()
        h = httplib.HTTPConnection(target.split('/')[0])
        try:
            h.request("GET", "/" + target.split('/', 1)[1])
        except(IndexError):
            h.request("GET", "/")
        r1 = h.getresponse()
        if verbose == 1:
            print "\t[+] Response:", r1.status, r1.reason
        if re.search(alert.replace("%2D", "-"), source) != None and r1.status not in range(303, 418):
            if target not in found_xss:
                if message != "":
                    print "\n[!] XSS:", target.replace("D3HYDR8%2D0wNz%2DY0U", message)
                else:
                    print "\n[!] XSS:", target
                print "\t[+] Response:", r1.status, r1.reason
                emails = getemails(target)
                if emails:
                    print "\t[+] Email:", len(emails), "addresses\n"
                    found_xss.setdefault(target, list(sets.Set(emails)))
                else:
                    found_xss[target] = "None"
    except(socket.timeout, socket.gaierror, socket.error, IOError, ValueError, httplib.BadStatusLine, httplib.IncompleteRead, httplib.InvalidURL):
        pass
    except():
        pass
Example 9: tester
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def tester(victim):
    if verbose == 1:
        print "Target:", victim
    try:
        source = urllib2.urlopen(proto + "://" + victim, port).read()
        h = httplib.HTTPConnection(victim.split('/')[0], int(port))
        try:
            h.request("GET", "/" + victim.split('/', 1)[1])
        except(IndexError):
            h.request("GET", "/")
        r1 = h.getresponse()
        if verbose == 1:
            print "\t[+] Response:", r1.status, r1.reason
        if re.search(title, source) != None and r1.status not in range(303, 418):
            if victim not in found_rfi:
                print "\n[!] RFI:", victim
                print "\t[+] Response:", r1.status, r1.reason
                found_rfi.append(victim)
    except(socket.timeout, socket.gaierror, socket.error, IOError, ValueError, httplib.BadStatusLine, httplib.IncompleteRead, httplib.InvalidURL):
        pass
    except(KeyboardInterrupt):
        print "\n[-] Cancelled -", timer(), "\n"
        sys.exit(1)
    except():
        pass
Example 10: crawl
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def crawl(target):
    try:
        visited = set()
        threadData = getCurrentThreadData()
        threadData.shared.value = oset()

        def crawlThread():
            threadData = getCurrentThreadData()

            while kb.threadContinue:
                with kb.locks.limit:
                    if threadData.shared.unprocessed:
                        current = threadData.shared.unprocessed.pop()
                        if current in visited:
                            continue
                        elif conf.crawlExclude and re.search(conf.crawlExclude, current):
                            dbgMsg = "skipping '%s'" % current
                            logger.debug(dbgMsg)
                            continue
                        else:
                            visited.add(current)
                    else:
                        break

                content = None
                try:
                    if current:
                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
                except SqlmapConnectionException, ex:
                    errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
                except SqlmapSyntaxException:
                    errMsg = "invalid URL detected. skipping '%s'" % current
                    logger.critical(errMsg)
                except httplib.InvalidURL, ex:
                    errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
Example 11: parseSitemap
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def parseSitemap(url, retVal=None):
    global abortedFlag

    if retVal is not None:
        logger.debug("parsing sitemap '%s'" % url)

    try:
        if retVal is None:
            abortedFlag = False
            retVal = oset()

        try:
            content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
        except httplib.InvalidURL:
            errMsg = "invalid URL given for sitemap ('%s')" % url
            raise SqlmapSyntaxException(errMsg)

        for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
            if abortedFlag:
                break
            url = match.group(1).strip()

            if url.endswith(".xml") and "sitemap" in url.lower():
                if kb.followSitemapRecursion is None:
                    message = "sitemap recursion detected. Do you want to follow? [y/N] "
                    kb.followSitemapRecursion = readInput(message, default='N', boolean=True)

                if kb.followSitemapRecursion:
                    parseSitemap(url, retVal)
            else:
                retVal.add(url)

    except KeyboardInterrupt:
        abortedFlag = True
        warnMsg = "user aborted during sitemap parsing. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    return retVal
Example 12: sendRequest
# Required import: import httplib [as alias]
# Or: from httplib import InvalidURL [as alias]
def sendRequest(self, session, verb, url, postget=False):
    if (postget or verb == 'POST' or verb == 'PUT') and '??' in url:
        url, data = url.split(u'??', 1)
        try:
            data = data.encode('utf-8', 'replace')
        except UnicodeEncodeError:
            pass
        if postget:
            data = None
    else:
        data = None
    req = requests.Request(verb, url, data=data)
    try:
        prepped = session.prepare_request(req)
    except httplib.InvalidURL as e:
        err = True
        msg = unicode(e, 'utf-8')
        return err, msg
    if verb == 'POST' or verb == 'PUT':
        prepped.headers['Content-Type'] = self.taskKwargs['content-type']
    try:
        pu = prepped.url.decode('utf-8')
    except (AttributeError, UnicodeDecodeError):
        pu = u''
    try:
        pb = prepped.body.decode('utf-8')
    except (AttributeError, UnicodeDecodeError):
        pb = u''
    msg = u'Prepped URL: %s\nBody: %s' % (pu, pb)
    sys.exc_clear()
    try:
        resp = session.send(prepped, timeout=20)
        msg += u'\nStatus: %s' % resp.status_code
        resp.raise_for_status()
        err = False
        if resp.text == '':
            respmsg = u'No response received'
        else:
            respmsg = resp.text.decode('unicode_escape', 'ignore')
        msg += u'\nResponse for %s: %s' % (verb, respmsg)
        resp.close()
    except requests.ConnectionError:
        err = True
        msg = _(u'Requests Connection Error')
    except requests.HTTPError as e:
        err = True
        msg = u'%s: %s' % (_(u'Requests HTTPError'), str(e))
    except requests.URLRequired as e:
        err = True
        msg = u'%s: %s' % (_(u'Requests URLRequired Error'), str(e))
    except requests.Timeout as e:
        err = True
        msg = u'%s: %s' % (_(u'Requests Timeout Error'), str(e))
    except requests.RequestException as e:
        err = True
        msg = u'%s: %s' % (_(u'Generic Requests Error'), str(e))
    except urllib2.HTTPError, e:
        err = True
        msg = _(u'HTTPError = ') + unicode(e.code)