

Python ExtendedUrllib.end Method Code Examples

This article collects typical usage examples of the Python method w3af.core.data.url.extended_urllib.ExtendedUrllib.end. If you are wondering what ExtendedUrllib.end does or how to call it, the curated examples below should help. You can also explore further usage examples of the containing class, w3af.core.data.url.extended_urllib.ExtendedUrllib.


A total of 15 code examples of the ExtendedUrllib.end method are shown below, sorted by popularity by default.
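All of the examples follow the same lifecycle: an ExtendedUrllib instance is created (typically in setUp) and ExtendedUrllib.end is called once the opener is no longer needed (typically in tearDown), so each test starts with a fresh opener and cleans up after itself. The following minimal sketch illustrates that pattern only; the class name and target URL are placeholders, and the import path for URL is an assumption that may differ between w3af versions.

import unittest

# Assumed import path for URL; some w3af versions use w3af.core.data.parsers.url instead
from w3af.core.data.parsers.doc.url import URL
from w3af.core.data.url.extended_urllib import ExtendedUrllib


class ExtendedUrllibEndSketch(unittest.TestCase):

    def setUp(self):
        # Create a fresh opener for every test
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        # Release the opener once the test is done
        self.uri_opener.end()

    def test_get(self):
        # Placeholder target, not taken from the original examples
        url = URL('http://127.0.0.1:8000/')
        response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(response.get_code(), 200)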

Example 1: TestXUrllibTimeout

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestXUrllibTimeout(unittest.TestCase):

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_timeout(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        self.uri_opener.settings.set_configured_timeout(0.5)
        self.uri_opener.clear_timeout()
        # We can mock this because it's being tested at TestXUrllibDelayOnError
        self.uri_opener._pause_on_http_error = Mock()
        start = time.time()

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            self.assertEqual(hre.message, 'HTTP timeout error')
        except Exception, e:
            msg = 'Not expecting: "%s"'
            self.assertTrue(False, msg % e.__class__.__name__)
Contributor: 0x554simon, Project: w3af, Lines of code: 32, Source file: test_xurllib_timeout.py

Example 2: Generic404Test

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class Generic404Test(unittest.TestCase):

    def get_body(self, unique_parts):
        # Do not increase the 50 multiplier too much, or the body will exceed
        # the xurllib maximum HTTP response body length
        parts = [re.__doc__, random.__doc__, unittest.__doc__]
        parts = parts * 50

        parts.extend(unique_parts)

        rnd = random.Random()
        rnd.seed(1)
        rnd.shuffle(parts)

        body = '\n'.join(parts)

        # filename = str(abs(hash(''.join(parts)))) + '-hash.txt'
        # file(filename, 'w').write(body)

        return body

    def setUp(self):
        self.urllib = ExtendedUrllib()

        self.fingerprint_404 = Fingerprint404()
        self.fingerprint_404.set_url_opener(self.urllib)

    def tearDown(self):
        self.urllib.end()
        clear_default_temp_db_instance()
Contributor: andresriancho, Project: w3af, Lines of code: 32, Source file: test_fingerprint_404.py

Example 3: TestExtendedUrllibProxy

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestExtendedUrllibProxy(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()
        
        # Start the proxy daemon
        self._proxy = Proxy('127.0.0.1', 0, ExtendedUrllib(), w3afProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()
        
        port = self._proxy.get_port()
        
        # Configure the proxy
        settings = OpenerSettings()
        options = settings.get_options()
        proxy_address_opt = options['proxy_address']
        proxy_port_opt = options['proxy_port']
        
        proxy_address_opt.set_value('127.0.0.1') 
        proxy_port_opt.set_value(port)
        
        settings.set_options(options)
        self.uri_opener.settings = settings
    
    def tearDown(self):
        self.uri_opener.end()
        
    def test_http_default_port_via_proxy(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_http_port_specification_via_proxy(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_https_via_proxy(self):
        TODO = 'Skip this test because of a strange bug with the extended'\
               ' url library and w3af\'s local proxy daemon. More info here:'\
               ' https://github.com/andresriancho/w3af/issues/183'
        raise SkipTest(TODO)
    
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_offline_port_via_proxy(self):
        url = URL('http://127.0.0.1:8181/')
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(http_response.get_code(), 400)
    
    def test_POST_via_proxy(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False)
        self.assertIn('123456abc', http_response.body)
Contributor: 3rdDegree, Project: w3af, Lines of code: 60, Source file: test_xurllib_proxy.py

Example 4: test_pickleable_shells

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
def test_pickleable_shells(self):
    pool = Pool(1)
    xurllib = ExtendedUrllib()

    original_shell = Shell(MockVuln(), xurllib, pool)

    kb.append('a', 'b', original_shell)
    unpickled_shell = kb.get('a', 'b')[0]

    self.assertEqual(original_shell, unpickled_shell)
    self.assertEqual(unpickled_shell.worker_pool, None)
    self.assertEqual(unpickled_shell._uri_opener, None)

    pool.terminate()
    pool.join()
    xurllib.end()
Contributor: ElAleyo, Project: w3af, Lines of code: 18, Source file: test_knowledge_base.py

Example 5: TestXUrllib

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'
    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()
    
    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()
        
    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_basic_ssl(self):
        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    @httpretty.activate
    def test_GET_with_post_data(self):
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        mock_url = URL(self.MOCK_URL)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/')

    @httpretty.activate
    def test_GET_with_post_data_and_qs(self):
        httpretty.register_uri(httpretty.GET, self.MOCK_URL,
                               body=self.MOTH_MESSAGE, status=200)

        qs = '?qs=1'
        mock_url = URL(self.MOCK_URL + qs)
        data = 'abc=123&def=456'
        response = self.uri_opener.GET(mock_url, data=data)

        # Check the response
        self.assertEqual(response.get_code(), 200)
        self.assertEqual(response.get_body(), self.MOTH_MESSAGE)

        # And use httpretty to check the request
        self.assertEqual(httpretty.last_request().method, 'GET')

        request_headers = httpretty.last_request().headers
        self.assertIn('content-length', request_headers)
        self.assertEqual(str(len(data)), request_headers['content-length'])

        self.assertEqual(httpretty.last_request().body, data)
        self.assertEqual(httpretty.last_request().path, '/' + qs)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']
#......... (remaining code omitted) .........
Contributor: 0x554simon, Project: w3af, Lines of code: 103, Source file: test_xurllib.py

Example 6: BaseGoogleAPISearch

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below for
          tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    GoogleApiSearcher = None

    COUNT = 10
    
    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return
        
        keywords = ["pink", "red", "blue"]
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple'\
              ' of minutes. Many consecutive failures show that our code is'\
              ' NOT working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None,
                            link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return
        
        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        msg = 'This test fails randomly based on Google\'s anti automation' \
              ' protection, if it fails you should run it again in a couple of' \
              ' minutes. Many consecutive failures show that our code is NOT' \
              ' working anymore.'
        self.assertEqual(searcher.status, FINISHED_OK, msg)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
Contributor: Daisymei, Project: w3af, Lines of code: 94, Source file: test_google.py

Example 7: TestMultipartPostUpload

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestMultipartPostUpload(unittest.TestCase):
    """
    In the new architecture I've been working on, the HTTP requests are almost
    completely created by serializing two objects:
        * FuzzableRequest
        * DataContainer (stored in FuzzableRequest._post_data)

    There is a special DataContainer sub-class for MultipartPost file uploads
    called MultipartContainer, which holds variables and files and when
    serialized will be encoded as multipart.

    These test cases try to make sure that the file upload feature works by
    sending a POST request with a MultipartContainer to moth.
    """
    MOTH_FILE_UP_URL = URL(get_moth_http('/core/file_upload/upload.py'))

    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    def test_multipart_without_file(self):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = 'this is not a file'
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                       ('type', 'hidden'),
                       ('value', '10000')])

        mpc = MultipartContainer(form_params)

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertNotIn('was successfully uploaded', resp.get_body())

    def test_file_upload(self):
        temp = tempfile.mkstemp(suffix=".tmp")
        os.write(temp[0], 'file content')

        _file = open(temp[1], "rb")
        self.upload_file(_file)

    def test_stringio_upload(self):
        _file = NamedStringIO('file content', name='test.txt')
        self.upload_file(_file)

    def upload_file(self, _file):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                               ('type', 'hidden'),
                               ('value', '10000')])

        mpc = MultipartContainer(form_params)
        mpc['uploadedfile'][0] = _file

        resp = self.opener.POST(self.MOTH_FILE_UP_URL, data=str(mpc),
                                headers=Headers(mpc.get_headers()))

        self.assertIn('was successfully uploaded', resp.get_body())

    def test_upload_file_using_fuzzable_request(self):
        form_params = FormParameters()
        form_params.add_field_by_attr_items([('name', 'uploadedfile')])
        form_params['uploadedfile'][0] = NamedStringIO('file content', name='test.txt')
        form_params.add_field_by_attr_items([('name', 'MAX_FILE_SIZE'),
                       ('type', 'hidden'),
                       ('value', '10000')])

        mpc = MultipartContainer(form_params)

        freq = FuzzableRequest(self.MOTH_FILE_UP_URL, post_data=mpc,
                               method='POST')

        resp = self.opener.send_mutant(freq)

        self.assertIn('was successfully uploaded', resp.get_body())
Contributor: 0x554simon, Project: w3af, Lines of code: 81, Source file: test_multipart.py

Example 8: TestXUrllib

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()
    
    def tearDown(self):
        self.uri_opener.end()
        
    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_POST(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_POST_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_url(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        self.assertRaises(ScanMustStopOnUrlError, self.uri_opener.GET, url)

    def test_url_port_not_http_many(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)
        for _ in xrange(MAX_ERROR_COUNT):
            try:
                self.uri_opener.GET(url)
            except ScanMustStopByUnknownReasonExc:
                self.assertTrue(False, 'Not expecting this exception type.')
            except ScanMustStopOnUrlError:
                self.assertTrue(True)
            except ScanMustStopException:
                self.assertTrue(True)
                break
        else:
            self.assertTrue(False)

    def test_timeout(self):
        upper_daemon = UpperDaemon(TimeoutTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()
#......... (remaining code omitted) .........
Contributor: ElAleyo, Project: w3af, Lines of code: 103, Source file: test_xurllib.py

Example 9: TestXUrllib

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestXUrllib(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()
    
    def tearDown(self):
        self.uri_opener.end()
        
    def test_basic(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)
        
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        
        self.assertGreaterEqual(http_response.id, 1)
        self.assertNotEqual(http_response.id, None)

    def test_cache(self):
        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)

    def test_qs_params(self):
        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=123456abc'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('123456abc', http_response.body)

        url = URL(get_moth_http('/audit/xss/simple_xss.py?text=root:x:0'))
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertIn('root:x:0', http_response.body)

    def test_post(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))

        data = URLEncodedForm()
        data['text'] = ['123456abc']

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn('123456abc', http_response.body)

    def test_post_special_chars(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        test_data = u'abc<def>"-á-'

        data = URLEncodedForm()
        data['text'] = [test_data]

        http_response = self.uri_opener.POST(url, data, cache=False)
        self.assertIn(test_data, http_response.body)

    def test_unknown_domain(self):
        url = URL('http://longsitethatdoesnotexistfoo.com/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_file_proto(self):
        url = URL('file://foo/bar.txt')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_closed(self):
        # TODO: Change 2312 by an always closed/non-http port
        url = URL('http://127.0.0.1:2312/')
        self.assertRaises(HTTPRequestException, self.uri_opener.GET, url)

    def test_url_port_not_http(self):
        upper_daemon = UpperDaemon(EmptyTCPHandler)
        upper_daemon.start()
        upper_daemon.wait_for_start()

        port = upper_daemon.get_port()

        url = URL('http://127.0.0.1:%s/' % port)

        try:
            self.uri_opener.GET(url)
        except HTTPRequestException, hre:
            self.assertEqual(hre.value, "Bad HTTP response status line: ''")
        else:
Contributor: andresriancho, Project: w3af-kali, Lines of code: 85, Source file: test_xurllib.py

Example 10: TestExtendedUrllibProxy

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestExtendedUrllibProxy(unittest.TestCase):

    MOTH_MESSAGE = '<title>moth: vulnerable web application</title>'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()
        
        # Start the proxy daemon
        self._proxy = Proxy('127.0.0.2', 0, ExtendedUrllib(), ProxyHandler)
        self._proxy.start()
        self._proxy.wait_for_start()
        
        port = self._proxy.get_port()
        
        # Configure the proxy
        settings = OpenerSettings()
        options = settings.get_options()
        proxy_address_opt = options['proxy_address']
        proxy_port_opt = options['proxy_port']
        
        proxy_address_opt.set_value('127.0.0.2')
        proxy_port_opt.set_value(port)
        
        settings.set_options(options)
        self.uri_opener.settings = settings
    
    def tearDown(self):
        self.uri_opener.end()
        
    def test_http_default_port_via_proxy(self):
        # TODO: Write this test
        pass

    def test_http_port_specification_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        url = URL(get_moth_http())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_https_via_proxy(self):
        self.assertEqual(self._proxy.total_handled_requests, 0)

        url = URL(get_moth_https())
        http_response = self.uri_opener.GET(url, cache=False)

        self.assertIn(self.MOTH_MESSAGE, http_response.body)
        self.assertEqual(self._proxy.total_handled_requests, 1)

    def test_offline_port_via_proxy(self):
        url = URL('http://127.0.0.1:8181/')
        http_response = self.uri_opener.GET(url, cache=False)
        self.assertEqual(http_response.get_code(), 500)
        self.assertIn('Connection refused', http_response.body)
    
    def test_POST_via_proxy(self):
        url = URL(get_moth_http('/audit/xss/simple_xss_form.py'))
        http_response = self.uri_opener.POST(url, data='text=123456abc', cache=False)
        self.assertIn('123456abc', http_response.body)
Contributor: 0x554simon, Project: w3af, Lines of code: 63, Source file: test_xurllib_proxy.py

Example 11: TestXUrllibDelayOnError

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestXUrllibDelayOnError(unittest.TestCase):

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    def test_increasing_delay_on_errors(self):
        expected_log = {0: False, 70: False, 40: False, 10: False, 80: False,
                        50: False, 20: False, 90: False, 60: False, 30: False,
                        100: False}
        self.assertEqual(self.uri_opener._sleep_log, expected_log)

        return_empty_daemon = UpperDaemon(EmptyTCPHandler)
        return_empty_daemon.start()
        return_empty_daemon.wait_for_start()

        port = return_empty_daemon.get_port()

        # No retries means that the test is easier to read/understand
        self.uri_opener.settings.set_max_http_retries(0)

        # We want to keep going, don't test the _should_stop_scan here.
        self.uri_opener._should_stop_scan = lambda x: False

        url = URL('http://127.0.0.1:%s/' % port)
        http_exception_count = 0
        loops = 100

        # Mock time.sleep so the test does not actually wait; the recorded calls are checked below
        with patch('w3af.core.data.url.extended_urllib.time.sleep') as sleepm:
            for i in xrange(loops):
                try:
                    self.uri_opener.GET(url, cache=False)
                except HTTPRequestException:
                    http_exception_count += 1
                except Exception, e:
                    msg = 'Not expecting: "%s"'
                    self.assertTrue(False, msg % e.__class__.__name__)
                else:
                    self.assertTrue(False, 'Expecting HTTPRequestException')

            self.assertEqual(loops - 1, i)

            # Note that the timeouts are increasing based on the error rate and
            # SOCKET_ERROR_DELAY
            expected_calls = [call(1.5),
                              call(3.0),
                              call(4.5),
                              call(6.0),
                              call(7.5),
                              call(9.0),
                              call(10.5),
                              call(12.0),
                              call(13.5)]

            expected_log = {0: False, 70: True, 40: True, 10: True, 80: True,
                            50: True, 20: True, 90: True, 60: True, 30: True,
                            100: False}
            self.assertEqual(expected_calls, sleepm.call_args_list)
            self.assertEqual(http_exception_count, 100)
            self.assertEqual(self.uri_opener._sleep_log, expected_log)

            # This one should also clear the log
            try:
                self.uri_opener.GET(url, cache=False)
            except HTTPRequestException:
                pass
            else:
                self.assertTrue(False, 'Expected HTTPRequestException')

            # The log was cleared, all values should be False
            self.assertTrue(all([not v for v in self.uri_opener._sleep_log.values()]))
Contributor: 0x554simon, Project: w3af, Lines of code: 76, Source file: test_xurllib_error_handling.py

Example 12: BaseGoogleAPISearch

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class BaseGoogleAPISearch(unittest.TestCase):
    """
    @see: test_GMobileSearch, test_GStandardSearch, test_GAjaxSearch below for
          tests on these particular search implementations.

    This base class is not intended to be run by nosetests.
    """
    GoogleApiSearcher = None

    COUNT = 10
    
    def setUp(self):
        self.opener = ExtendedUrllib()

    def tearDown(self):
        self.opener.end()

    @attr('fails')
    def test_len_link_results(self):
        if self.GoogleApiSearcher is None:
            return
        
        keywords = ['pink', 'red', 'blue']
        random.shuffle(keywords)
        query = ' '.join(keywords)
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        link_list = '\n'.join(str(r) for r in searcher.links)
        msg = 'Got less results than expected, %s is less than %s:\n%s'
        msg = msg % (len(searcher.links), self.COUNT, link_list)
        self.assertGreaterEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            self.assertTrue(URL_REGEX.match(link.URL.url_string) is not None,
                            link.URL.url_string)

        for page in searcher.pages:
            self.assertTrue(isinstance(page, HTTPResponse))

        # Check that the links are related to my search
        related = 0
        for link in searcher.links:
            for key in keywords:
                if key in link.URL.url_string.lower():
                    related += 1

        self.assertTrue(related > 5, related)

    @attr('fails')
    def test_links_results_domain(self):
        if self.GoogleApiSearcher is None:
            return
        
        domain = "www.bonsai-sec.com"
        query = "site:%s" % domain
        start = 0
        # pylint: disable=E1102
        # E1102: self.GoogleApiSearcher is not callable
        searcher = self.GoogleApiSearcher(self.opener, query, start, self.COUNT)

        self.assertEqual(searcher.status, IS_NEW)

        # This actually does the search
        searcher.links

        self.assertEqual(searcher.status, FINISHED_OK, GOOGLE_MSG)

        msg = 'Got less results than expected:\n%s' % '\n'.join(
            str(r) for r in searcher.links)
        self.assertEqual(len(searcher.links), self.COUNT, msg)

        for link in searcher.links:
            link_domain = link.URL.get_domain()
            msg = "Current link domain is '%s'. Expected: '%s'" % (
                link_domain, domain)
            self.assertEqual(link_domain, domain, msg)
Contributor: 0x554simon, Project: w3af, Lines of code: 88, Source file: test_google.py

Example 13: TestSQLMapWrapper

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestSQLMapWrapper(unittest.TestCase):
    
    SQLI_GET = get_moth_http('/audit/sql_injection/'
                             'where_string_single_qs.py?uname=pablo')

    SSL_SQLI_GET = get_moth_https('/audit/sql_injection/'
                                  'where_string_single_qs.py?uname=pablo')

    SQLI_POST = get_moth_http('/audit/sql_injection/where_integer_form.py')
    
    DATA_POST = 'text=1'
    
    def setUp(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)
        
        self.uri_opener = ExtendedUrllib()
        
        self.sqlmap = SQLMapWrapper(target, self.uri_opener, debug=True)
    
    def tearDown(self):
        self.uri_opener.end()
        self.sqlmap.cleanup()
    
    @classmethod
    def setUpClass(cls):
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    @classmethod
    def tearDownClass(cls):
        # Doing this in both setupclass and teardownclass in order to be sure
        # that a ctrl+c doesn't break it
        output_dir = os.path.join(SQLMapWrapper.SQLMAP_LOCATION, 'output')
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)
        
    def test_verify_vulnerability(self):
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable)
    
    def test_verify_vulnerability_ssl(self):
        uri = URL(self.SSL_SQLI_GET)
        target = Target(uri)
        
        self.uri_opener = ExtendedUrllib()
        
        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)

    def test_verify_vulnerability_false(self):
        not_vuln = get_moth_http('/audit/sql_injection/'
                                 'where_string_single_qs.py?fake=pablo')
        uri = URL(not_vuln)
        target = Target(uri)
        
        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertFalse(vulnerable)
        
    def test_verify_vulnerability_POST(self):
        target = Target(URL(self.SQLI_POST), self.DATA_POST)
        
        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        
        vulnerable = self.sqlmap.is_vulnerable()
        self.assertTrue(vulnerable, self.sqlmap.last_stdout)
        
    def test_wrapper_invalid_url(self):
        self.assertRaises(TypeError, SQLMapWrapper, self.SQLI_GET, self.uri_opener)
    
    def test_stds(self):
        uri = URL(self.SQLI_GET)
        target = Target(uri)
        
        self.sqlmap = SQLMapWrapper(target, self.uri_opener)
        
        prms = ['--batch',]
        cmd, process = self.sqlmap.run_sqlmap_with_pipes(prms)
        
        self.assertIsInstance(process.stdout, file)
        self.assertIsInstance(process.stderr, file)
        self.assertIsInstance(process.stdin, file)
        self.assertIsInstance(cmd, basestring)
        
        self.assertIn('sqlmap.py', cmd)
        
    def test_target_basic(self):
        target = Target(URL(self.SQLI_GET))
        params = target.to_params()
        
        self.assertEqual(params, ["--url=%s" % self.SQLI_GET])
    
    def test_target_post_data(self):
        target = Target(URL(self.SQLI_GET), self.DATA_POST)
        params = target.to_params()
        
#......... (remaining code omitted) .........
Contributor: 3rdDegree, Project: w3af, Lines of code: 103, Source file: test_sqlmap_wrapper.py

Example 14: TestGetAverageRTT

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestGetAverageRTT(unittest.TestCase):

    MOCK_URL = 'http://www.w3af.org/'

    def setUp(self):
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()
        httpretty.reset()

    @httpretty.activate
    def test_get_average_rtt_for_mutant_all_equal(self):

        def request_callback(request, uri, headers):
            time.sleep(0.5)
            body = 'Yup'
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=request_callback)

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_similar(self):

        def request_callback(request, uri, headers):
            time.sleep(0.4 + random.randint(1, 9) / 100.0)
            body = 'Yup'
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=request_callback)

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.45)
        self.assertGreater(0.55, average_rtt)

    @httpretty.activate
    def test_get_average_rtt_for_mutant_one_off(self):
        #
        # TODO: This is one of the cases I need to fix using _has_outliers!
        #       Calculating the average using 0.3 , 0.2 , 2.0 is madness
        #

        httpretty.register_uri(httpretty.GET,
                               self.MOCK_URL,
                               body=RequestCallBackWithDelays([0.3, 0.2, 2.0]))

        mock_url = URL(self.MOCK_URL)
        fuzzable_request = FuzzableRequest(mock_url)
        average_rtt = self.uri_opener.get_average_rtt_for_mutant(fuzzable_request)

        # Check the response
        self.assertGreater(average_rtt, 0.80)
        self.assertGreater(0.90, average_rtt)
Contributor: knucker, Project: w3af, Lines of code: 71, Source file: test_get_average_rtt.py

Example 15: TestRedirectHandlerExtendedUrllib

# Required module: from w3af.core.data.url.extended_urllib import ExtendedUrllib [as alias]
# Or: from w3af.core.data.url.extended_urllib.ExtendedUrllib import end [as alias]
class TestRedirectHandlerExtendedUrllib(unittest.TestCase):
    """
    Test the redirect handler using ExtendedUrllib
    """
    REDIR_DEST = 'http://w3af.org/dest'
    REDIR_SRC = 'http://w3af.org/src'
    OK_BODY = 'Body!'

    def setUp(self):
        consecutive_number_generator.reset()
        self.uri_opener = ExtendedUrllib()

    def tearDown(self):
        self.uri_opener.end()

    @httpretty.activate
    def test_redirect_302_simple_no_follow(self):

        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src)

        location, _ = response.get_headers().iget('location')
        self.assertEqual(location, self.REDIR_DEST)
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_302_simple_follow(self):

        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location': self.REDIR_DEST})

        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body=self.OK_BODY, status=200)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), OK)
        self.assertEqual(response.get_body(), self.OK_BODY)
        self.assertEqual(response.get_redir_uri(), URL(self.REDIR_DEST))
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        self.assertEqual(response.get_id(), 2)

    @httpretty.activate
    def test_redirect_301_loop(self):

        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=MOVED_PERMANENTLY,
                               adding_headers={'Location': self.REDIR_DEST})

        httpretty.register_uri(httpretty.GET, self.REDIR_DEST,
                               body='', status=MOVED_PERMANENTLY,
                               adding_headers={'URI': self.REDIR_SRC})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # At some point the handler detects a loop and stops
        self.assertEqual(response.get_code(), MOVED_PERMANENTLY)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 9)

    @httpretty.activate
    def test_redirect_302_without_location_returns_302_response(self):
        # Breaks the RFC
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND)

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        # Doesn't follow the redirects
        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_id(), 1)

    @httpretty.activate
    def test_redirect_no_follow_file_proto(self):
        httpretty.register_uri(httpretty.GET, self.REDIR_SRC,
                               body='', status=FOUND,
                               adding_headers={'Location':
                                               'file:///etc/passwd'})

        redirect_src = URL(self.REDIR_SRC)
        response = self.uri_opener.GET(redirect_src, follow_redirects=True)

        self.assertEqual(response.get_code(), FOUND)
        self.assertEqual(response.get_body(), '')
        self.assertEqual(response.get_url(), URL(self.REDIR_SRC))
        self.assertEqual(response.get_id(), 1)
Contributor: 0x554simon, Project: w3af, Lines of code: 98, Source file: test_redirect.py


Note: The w3af.core.data.url.extended_urllib.ExtendedUrllib.end examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Refer to the corresponding project's license before distributing or using the code, and do not reproduce this article without permission.