本文整理汇总了Python中w3af.core.data.db.disk_set.DiskSet类的典型用法代码示例。如果您正苦于以下问题:Python DiskSet类的具体用法?Python DiskSet怎么用?Python DiskSet使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DiskSet类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_add_HTTPPostDataRequest
def test_add_HTTPPostDataRequest(self):
    """Duplicate HTTPPostDataRequest adds must be collapsed to one entry."""
    disk_set = DiskSet()

    headers = Headers([("Referer", "http://w3af.org/")])
    pdr1 = HTTPPostDataRequest(URL("http://w3af.org/?id=2"),
                               method="GET", headers=headers)
    pdr2 = HTTPPostDataRequest(URL("http://w3af.org/?id=3"),
                               method="GET", headers=headers)
    pdr3 = HTTPPostDataRequest(URL("http://w3af.org/?id=7"),
                               method="FOO", headers=headers)

    # Add each of the first two requests twice; duplicates must be ignored
    for request in (pdr1, pdr2, pdr2, pdr1):
        disk_set.add(request)

    # Insertion order is preserved, duplicates dropped
    self.assertEqual(disk_set[0], pdr1)
    self.assertEqual(disk_set[1], pdr2)
    self.assertFalse(pdr3 in disk_set)
    self.assertTrue(pdr2 in disk_set)
    self.assertEqual(len(disk_set), 2)

    # This forces an internal change in the URL object
    pdr2.get_url().url_string
    self.assertTrue(pdr2 in disk_set)
示例2: test_add_QsRequest
def test_add_QsRequest(self):
    """Duplicate FuzzableRequest adds must be collapsed to one entry."""
    disk_set = DiskSet()

    headers = Headers([('Referer', 'http://w3af.org/')])
    qsr1 = FuzzableRequest(URL('http://w3af.org/?id=2'),
                           method='GET', headers=headers)
    qsr2 = FuzzableRequest(URL('http://w3af.org/?id=3'),
                           method='GET', headers=headers)
    qsr3 = FuzzableRequest(URL('http://w3af.org/?id=7'),
                           method='FOO', headers=headers)

    # Add each of the first two requests twice; duplicates must be ignored
    for request in (qsr1, qsr2, qsr2, qsr1):
        disk_set.add(request)

    # Insertion order is preserved, duplicates dropped
    self.assertEqual(disk_set[0], qsr1)
    self.assertEqual(disk_set[1], qsr2)
    self.assertFalse(qsr3 in disk_set)
    self.assertTrue(qsr2 in disk_set)
    self.assertEqual(len(disk_set), 2)

    # This forces an internal change in the URL object
    qsr2.get_url().url_string
    self.assertIn(qsr2, disk_set)
示例3: dot_ds_store
class dot_ds_store(CrawlPlugin):
    """
    Search .DS_Store file and checks for files containing.

    :author: Tomas Velazquez ( [email protected] )
    :author: Andres Riancho ( [email protected] )

    :credits: This code was based in cpan Mac::Finder::DSStore by
              Wim Lewis ( [email protected] )
    """
    # Filename that Mac OS Finder drops into browsed directories
    DS_STORE = '.DS_Store'

    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables: disk-backed set of directories already checked
        self._analyzed_dirs = DiskSet()

    def crawl(self, fuzzable_request):
        """
        For every directory, fetch a list of files and analyze the response.

        :parameter fuzzable_request: A fuzzable_request instance that contains
                                     (among other things) the URL to test.
        """
        directories_to_check = []

        for domain_path in fuzzable_request.get_url().get_directories():
            # Only queue each directory once across the whole scan
            if domain_path not in self._analyzed_dirs:
                self._analyzed_dirs.add(domain_path)
                directories_to_check.append(domain_path)

        # Send the requests using threads
        self.worker_pool.map(self._check_and_analyze, directories_to_check)

    def _check_and_analyze(self, domain_path):
        """
        Check if a .DS_Store filename exists in the domain_path.

        :return: None, everything is saved to the self.out_queue.
        """
        # Request the file
        url = domain_path.url_join(self.DS_STORE)

        try:
            response = self.http_get_and_parse(url, binary_response=True)
        except BaseFrameworkException as w3:
            msg = 'Failed to GET .DS_Store file: %s. Exception: %s.'
            # Bug fix: interpolate the values into the message. The original
            # passed the tuple as a second positional argument, leaving the
            # %s placeholders unfilled (the sibling phpinfo plugin uses
            # `msg % (...)` correctly).
            om.out.debug(msg % (url, w3))
            return

        # Check if it's a .DS_Store file
        if is_404(response):
            return

        try:
            store = DsStore(response.get_raw_body())
            entries = store.get_file_entries()
        except Exception as e:
            om.out.debug('Unexpected error while parsing DS_Store file: "%s"' % e)
            return
示例4: setup
def setup(self):
    """
    Setup all the required backend stores. This was mostly created to avoid
    starting any threads during __init__() which is called during python's
    import phase and dead-locks in some cases.

    :return: None
    """
    with self._kb_lock:
        if self.initialized:
            # Another caller already ran the setup, nothing left to do
            return

        # Disk-backed stores for crawl results
        self.urls = DiskSet(table_prefix='kb_urls')
        self.fuzzable_requests = DiskSet(table_prefix='kb_fuzzable_requests')

        self.db = get_default_persistent_db_instance()

        # Fresh randomly-named table per knowledge base instance
        self.table_name = 'knowledge_base_' + rand_alpha(30)
        self.db.create_table(self.table_name, self.COLUMNS)
        for index_columns in (['location_a', 'location_b'], ['uniq_id']):
            self.db.create_index(self.table_name, index_columns)
        self.db.commit()

        # Only initialize once
        self.initialized = True
示例5: dwsync_xml
class dwsync_xml(CrawlPlugin):
    """
    Search Dream Waver Sync file (dwsync.xml) and extract referenced files.

    :author: Tomas Velazquez ([email protected])
    """
    # Path where Dreamweaver leaves its sync metadata
    DWSYNC = '_notes/dwsync.xml'

    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables: disk-backed set of directories already checked
        self._analyzed_dirs = DiskSet()

    def crawl(self, fuzzable_request):
        """
        For every directory, fetch a list of files and analyze the response.

        :parameter fuzzable_request: A fuzzable_request instance that contains
                                     (among other things) the URL to test.
        """
        for domain_path in fuzzable_request.get_url().get_directories():
            # Only inspect each directory once across the whole scan
            if domain_path not in self._analyzed_dirs:
                self._analyzed_dirs.add(domain_path)
                self._find_dwsync(domain_path)

    def _find_dwsync(self, domain_path):
        # Request _notes/dwsync.xml relative to the directory under test
        dwsync_url = domain_path.url_join(self.DWSYNC)
        response = self.http_get_and_parse(dwsync_url)

        if is_404(response):
            return

        # Cheap sanity check before paying for a full XML parse
        if '</dwsync>' not in response.get_body():
            return

        om.out.debug('Parsing dwsync.xml file at %s' % dwsync_url)

        try:
            dom = xml.dom.minidom.parseString(response.get_body())
        except Exception as e:
            msg = 'Exception while parsing dwsync.xml file at %s : "%s"'
            om.out.debug(msg % (dwsync_url, e))
            return

        parsed_url_list = set()

        for file_entry in dom.getElementsByTagName('file'):
            try:
                _file = file_entry.getAttribute('name')
                url = domain_path.url_join(_file)
                parsed_url_list.add(url)
            except ValueError as ve:
                msg = 'dwsync file had an invalid URL: "%s"'
                om.out.debug(msg % ve)
            except Exception as e:
                # Bug fix: the original message said "Sitemap file", a
                # copy-paste left-over from the sitemap plugin; this plugin
                # parses dwsync.xml
                msg = 'dwsync file had an invalid format: "%s"'
                om.out.debug(msg % e)
示例6: test_add
def test_add(self):
    """Adding a duplicate value must leave the DiskSet unchanged."""
    disk_set = DiskSet()

    # 1 is added twice; the second add must be a no-op
    for value in (1, 2, 3, 1):
        disk_set.add(value)

    self.assertEqual(list(disk_set), [1, 2, 3])
    self.assertEqual(len(disk_set), 3)
    self.assertEqual(unicode(disk_set), u'<DiskSet [1, 2, 3]>')
示例7: test_disk_set
def test_disk_set(self):
    """
    Smoke test: a DiskSet must handle a large number of items and answer
    membership queries for every item that was added.
    """
    ds = DiskSet()

    for i in xrange(20000):
        data = (i, i)
        ds.add(data)

    for i in xrange(20000):
        data = (i, i)
        # Bug fix: the original evaluated `data in ds` and discarded the
        # result, so the loop asserted nothing; assert the membership.
        self.assertIn(data, ds)
示例8: test_add_urlobject
def test_add_urlobject(self):
    """URL objects that compare equal must be de-duplicated by DiskSet."""
    disk_set = DiskSet()

    # The id=3 URL is added twice; only one copy must be stored
    for url_string in ('http://w3af.org/?id=2',
                       'http://w3af.org/?id=3',
                       'http://w3af.org/?id=3'):
        disk_set.add(URL(url_string))

    self.assertEqual(disk_set[0], URL('http://w3af.org/?id=2'))
    self.assertEqual(disk_set[1], URL('http://w3af.org/?id=3'))
    self.assertEqual(len(disk_set), 2)

    self.assertFalse(URL('http://w3af.org/?id=4') in disk_set)
    self.assertTrue(URL('http://w3af.org/?id=2') in disk_set)
示例9: test_table_name_with_prefix
def test_table_name_with_prefix(self):
    """A caller-supplied prefix must appear in the backing table name."""
    prefix = 'unittest'
    disk_set = DiskSet(prefix)

    self.assertIn(prefix, disk_set.table_name)

    # The backing table exists while the DiskSet is alive...
    db = get_default_temp_db_instance()
    self.assertTrue(db.table_exists(disk_set.table_name))

    # ...and is dropped by cleanup()
    disk_set.cleanup()
    self.assertFalse(db.table_exists(disk_set.table_name))
示例10: test_remove_table
def test_remove_table(self):
    """cleanup() must drop the DiskSet's underlying database table."""
    disk_set = DiskSet()
    for value in (1, 2):
        disk_set.add(value)

    table_name = disk_set.table_name
    db = get_default_temp_db_instance()

    # Table exists while the set is in use...
    self.assertTrue(db.table_exists(table_name))

    # ...and is gone after cleanup
    disk_set.cleanup()
    self.assertFalse(db.table_exists(table_name))
示例11: phpinfo
class phpinfo(CrawlPlugin):
    """
    Search PHP Info file and if it finds it will determine the version of PHP.
    :author: Viktor Gazdag ( [email protected] )
    """
    """
    CHANGELOG:
    Feb/17/2009- Added PHP Settings Audit Checks by Aung Khant (aungkhant[at]yehg.net)
    """
    def __init__(self):
        CrawlPlugin.__init__(self)
        # Internal variables
        #
        # Disk-backed set of directories already scheduled for a phpinfo check
        self._analyzed_dirs = DiskSet()
        # NOTE(review): presumably a flag for the PHP settings audit mentioned
        # in the changelog — confirm against the full plugin source
        self._has_audited = 0
    def crawl(self, fuzzable_request):
        """
        For every directory, fetch a list of files and analyze the response.
        :param fuzzable_request: A fuzzable_request instance that contains
        (among other things) the URL to test.
        """
        for domain_path in fuzzable_request.get_url().get_directories():
            # Each directory is only checked once per scan
            if domain_path in self._analyzed_dirs:
                continue
            self._analyzed_dirs.add(domain_path)
            # Pair the directory with every candidate phpinfo filename and
            # test the combinations concurrently via the worker pool
            url_repeater = repeat(domain_path)
            args = izip(url_repeater, self._get_potential_phpinfos())
            self.worker_pool.map_multi_args(self._check_and_analyze, args)
    def _check_and_analyze(self, domain_path, php_info_filename):
        """
        Check if a php_info_filename exists in the domain_path.
        :return: None, everything is put() into the self.output_queue.
        """
        # Request the file
        php_info_url = domain_path.url_join(php_info_filename)
        try:
            response = self._uri_opener.GET(php_info_url, cache=True)
        except BaseFrameworkException, w3:
            msg = 'Failed to GET phpinfo file: "%s". Exception: "%s".'
            om.out.debug(msg % (php_info_url, w3))
        else:
            # NOTE(review): the body of this else branch is missing from this
            # excerpt — the response analysis presumably happens here
示例12: cleanup
def cleanup(self):
    """
    Cleanup internal data.

    Truncates the findings table and replaces the disk-backed stores with
    fresh, empty instances.
    """
    self.db.execute("DELETE FROM %s WHERE 1=1" % self.table_name)

    # Remove each old disk set's backing table, then start over empty
    for attr_name, prefix in (('urls', 'kb_urls'),
                              ('fuzzable_requests', 'kb_fuzzable_requests')):
        getattr(self, attr_name).cleanup()
        setattr(self, attr_name, DiskSet(table_prefix=prefix))

    self.observers.clear()
示例13: __init__
def __init__(self):
    """Create the caches and bookkeeping structures for the parser cache."""
    super(ParserCache, self).__init__()

    # Disk-backed set of documents excluded from parsing
    self._parser_blacklist = DiskSet()

    # NOTE(review): names suggest these events signal in-progress parses —
    # confirm against the rest of the class
    self._parser_finished_events = {}

    # LRU for parsed documents, plus a 10x larger one for the cheaper
    # can-this-be-parsed checks
    self._cache = SynchronizedLRUDict(self.CACHE_SIZE)
    self._can_parse_cache = SynchronizedLRUDict(self.CACHE_SIZE * 10)
示例14: test_store_fuzzable_request
def test_store_fuzzable_request(self):
    """A FuzzableRequest must round-trip through a DiskSet by value."""
    # Build a simple POST form with two inputs
    form_params = FormParameters()
    for input_attrs in ([("name", "username"), ("value", "abc")],
                        [("name", "address"), ("value", "")]):
        form_params.add_input(input_attrs)
    form_params.set_action(URL('http://example.com/?id=1'))
    form_params.set_method('post')

    fuzzable_request = FuzzableRequest.from_form(dc_from_form_params(form_params))

    disk_set = DiskSet()
    disk_set.add(fuzzable_request)

    # The retrieved copy compares equal but is a distinct object
    round_tripped = disk_set[0]
    self.assertEqual(round_tripped, fuzzable_request)
    self.assertIsNot(round_tripped, fuzzable_request)
示例15: __init__
def __init__(self):
    """
    Initialize the plugin's internal state.

    CHANGELOG:
    Feb/17/2009- Added PHP Settings Audit Checks by Aung Khant
    (aungkhant[at]yehg.net)
    """
    CrawlPlugin.__init__(self)

    # Disk-backed set of directories already analyzed
    self._analyzed_dirs = DiskSet()
    # NOTE(review): presumably a flag for the PHP settings audit mentioned
    # in the changelog — confirm against the full plugin source
    self._has_audited = 0