

Python dummy.Process Class Code Examples

This article collects typical usage examples of Python's multiprocessing.dummy.Process class. If you are wondering what the Process class is for, how to use it, or what real code that uses it looks like, the selected examples below should help.


The following presents 15 code examples of the Process class, sorted by popularity by default.
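
Before the individual examples, here is a minimal sketch (written for this article, not taken from any of the projects below) of what multiprocessing.dummy.Process is: the multiprocessing.dummy module mirrors the multiprocessing API but backs Process with a thread, so the same start()/join()/daemon code works whether the target runs in a worker thread or a real child process.

# Minimal sketch: multiprocessing.dummy.Process is a thread behind the
# multiprocessing.Process API, so it shares memory with its parent.
from multiprocessing.dummy import Process, Queue

def worker(q):
    q.put('hello from a dummy Process')

if __name__ == '__main__':
    q = Queue()
    p = Process(target=worker, args=(q,), name='DemoThread')
    p.daemon = True
    p.start()
    print(q.get())
    p.join()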

Example 1: process_updates

def process_updates():
    """
    Decides which type the update is and routes it to the appropriate route_updates
    method and launches a thread for the run_extensions method.
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    plugin_http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    plugin_http.timeout = urllib3.Timeout(connect=1.0)
    plugin_http.retries = 3
    update_router = RouteMessage(PLUGINS, plugin_http, GET_ME, CONFIG)
    while RUNNING.value:
        try:
            update = MESSAGE_QUEUE.get_nowait()
        except queue.Empty:
            time.sleep(SLEEP_TIME)
            continue
        extension_thread = ThreadProcess(target=run_extensions, args=(update, ))
        extension_thread.start()
        if 'message' in update:
            update_router.route_update(update['message'])
        elif 'edited_message' in update:
            update_router.route_update(update['edited_message'])
        elif 'callback_query' in update:
            route_callback_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['callback_query'])
        elif 'inline_query' in update:
            route_inline_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['inline_query'])
        extension_thread.join()
Author: arcueidB, Project: hitagibot, Lines: 27, Source: hitagi.py

Example 2: __init__

    def __init__(self, ip, port, uri_opener, proxy_handler=w3afProxyHandler,
                 proxy_cert='core/controllers/daemons/mitm.crt'):
        '''
        :param ip: IP address to bind
        :param port: Port to bind
        :param uri_opener: The uri_opener that will be used to open
            the requests that arrive from the browser
        :param proxy_handler: A class that will know how to handle
            requests from the browser
        :param proxy_cert: Proxy certificate to use, this is needed
            for proxying SSL connections.
        '''
        Process.__init__(self)
        self.daemon = True
        self.name = 'ProxyThread'
        
        # Internal vars
        self._server = None
        self._proxy_handler = proxy_handler
        self._running = False
        self._uri_opener = uri_opener

        # User configured parameters
        self._ip = ip
        self._port = port
        self._proxy_cert = proxy_cert

        # Start the proxy server
        try:
            self._server = ProxyServer((self._ip, self._port),
                                       self._proxy_handler)
        except socket.error, se:
            raise w3afProxyException('Socket error while starting proxy: "%s"'
                                     % se.strerror)
Author: HamzaKo, Project: w3af, Lines: 34, Source: proxy.py

Example 3: __init__

    def __init__(self, ip, port, uri_opener, handler_klass=ProxyHandler,
                 ca_certs=CA_CERT_DIR, name='ProxyThread'):
        """
        :param ip: IP address to bind
        :param port: Port to bind
        :param uri_opener: The uri_opener that will be used to open
                           the requests that arrive from the browser
        :param handler_klass: A class that will know how to handle
                              requests from the browser
        """
        Process.__init__(self)
        self.daemon = True
        self.name = name
        
        # Internal vars
        self._server = None
        self._running = False
        self._uri_opener = uri_opener
        self._ca_certs = ca_certs

        # Stats
        self.total_handled_requests = 0

        # User configured parameters
        try:
            self._config = ProxyConfig(cadir=self._ca_certs,
                                       ssl_version_client='SSLv23',
                                       ssl_version_server='SSLv23',
                                       host=ip,
                                       port=port)
        except AttributeError as ae:
            if str(ae) == "'module' object has no attribute '_lib'":
                # This is a rare issue with the OpenSSL setup that some users
                # (mostly in mac os) find. Not related with w3af/mitmproxy but
                # with some broken stuff they have
                #
                # https://github.com/mitmproxy/mitmproxy/issues/281
                # https://github.com/andresriancho/w3af/issues/10716
                #
                # AttributeError: 'module' object has no attribute '_lib'
                raise ProxyException(self.INCORRECT_SETUP % ae)

            else:
                # Something unexpected, raise
                raise

        # Setting these options together with ssl_version_client and
        # ssl_version_server set to SSLv23 means that the proxy will allow all
        # types (including insecure) of SSL connections
        self._config.openssl_options_client = None
        self._config.openssl_options_server = None

        # Start the proxy server
        try:
            self._server = ProxyServer(self._config)
        except socket.error, se:
            raise ProxyException('Socket error while starting proxy: "%s"'
                                 % se.strerror)
Author: 0x554simon, Project: w3af, Lines: 58, Source: proxy.py

Example 4: process_request

    def process_request(self, request, client_address):
        """
        Start a new thread to process the request.

        Override here
        """
        t = Process(target=self.process_request_thread, args=(request, client_address))
        t.daemon = self.daemon_threads
        t.start()
Author: ZionOps, Project: w3af, Lines: 9, Source: mp_flask.py

Example 5: main

def main():
    """
    Creates instances of the above methods and occasionally checks for crashed
    worker processes & relaunches.
    """
    worker_process = list()
    get_update_process = Process(target=get_updates)
    get_update_process.start()
    for i in range(0, int(CONFIG['BOT_CONFIG']['workers'])):
        worker_process.append(Process(target=process_updates))
        worker_process[i].start()
    time_worker = ThreadProcess(target=check_time_args)
    time_worker.start()
    while RUNNING.value:
        time.sleep(30)
        for index, worker in enumerate(worker_process):
            if not worker.is_alive():
                del worker_process[index]
                worker_process.append(Process(target=process_updates))
                worker_process[-1].start()
        if not time_worker.is_alive():
            time_worker = ThreadProcess(target=check_time_args)
            time_worker.start()
        if not get_update_process.is_alive():
            get_update_process = Process(target=get_updates)
            get_update_process.start()
    get_update_process.join()
    time_worker.join()
    for worker in worker_process:
        worker.join()
Author: arcueidB, Project: hitagibot, Lines: 30, Source: hitagi.py

Example 6: _draw_start

    def _draw_start(self):
        if not self._somethingnew:
            return True

        # let's draw!
        q = Queue.Queue()
        evt = Event()
        th = Process(target=self._draw_real, args=(q, evt), name='GTKDraw')
        th.start()
        gobject.timeout_add(500, self._draw_end, q, evt)
        return False
Author: HamzaKo, Project: w3af, Lines: 11, Source: scanrun.py

Example 7: __init__

    def __init__(self, exec_method, ip_address, socks_port=1080):
        Process.__init__(self)
        self.daemon = True

        #    Configuration
        self._exec_method = exec_method
        self._ip_address = ip_address
        self._socks_port = socks_port

        #    Internal
        self._agent_server = None
Author: Adastra-thw, Project: w3af, Lines: 11, Source: w3afAgentManager.py

Example 8: scoreDuplicates

def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    if num_cores < 2:
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue

    first, records = peek(records)
    if first is None:
        raise BlockingError("No records have been blocked together. "
                            "Is the data you are trying to match like "
                            "the data you trained on?")

    record_pairs_queue = Queue(2)
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()

    n_map_processes = max(num_cores, 1)
    score_records = ScoreDupes(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    [process.start() for process in map_processes]

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        raise ChildProcessError

    if result:
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype,
                                    shape=(size,))
    else:
        dtype = numpy.dtype([('pairs', object, 2),
                             ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)

    reduce_process.join()
    [process.join() for process in map_processes]

    return scored_pairs
Author: datamade, Project: dedupe, Lines: 51, Source: core.py
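
Example 8 (and Example 13 below) uses the same fallback pattern: when only one core is available it imports the thread-backed Process and Queue from multiprocessing.dummy, otherwise the process-backed versions, and the rest of the pipeline is written against the shared API. A generic sketch of that pattern (an illustration for this article, not dedupe code):

def get_parallel_primitives(num_cores):
    # Thread-backed primitives for a single core, process-backed otherwise;
    # both expose the same Process/Queue interface.
    if num_cores < 2:
        from multiprocessing.dummy import Process, Queue
    else:
        from multiprocessing import Process, Queue
    return Process, Queue

Process, Queue = get_parallel_primitives(num_cores=1)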

Example 9: test_pause

    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL('http://moth/')
            http_response = uri_opener.GET(url)
            output.put(http_response)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)
Author: Adastra-thw, Project: w3af, Lines: 14, Source: test_xurllib.py

Example 10: get_stats

def get_stats():
	print 'Fetching NBA player stats...'
	stats_outfile = RUNDAY+'_nba_stats.csv'
	csvout = open(stats_outfile, 'wb')

	NUM_THREADS = 8

	in_queue = Queue()
	out_queue = Queue()
	queue_players(in_queue)

	while not in_queue.empty():	
		jobs = []

		for i in range(NUM_THREADS):
			if not in_queue.empty():
				thread = Process(target=get_stats_helper, args=(in_queue, out_queue))
				jobs.append(thread)
				thread.start()
		for thread in jobs:
			thread.join()	

		while not out_queue.empty():
			player = out_queue.get()
			del player['SUCCESS']
			try: 
				name = player['NAME']
			except KeyError as e:
				continue
			player['TIME'] = RUNDAY
			fieldnames = [
				'TIME',
				'NAME', 
				'JERSEY',
				'SPORT',
				'TEAM',
				'POSITION',
				'PTS',
				'REB',
				'AST',
				'URL'
			]
		
			csvwriter = csv.DictWriter(csvout, delimiter='|', fieldnames=fieldnames)
			csvwriter.writerow(player)
	csvout.close()

	print 'Finished fetching NBA player stats.'
	print 'Output saved in %s' % stats_outfile
Author: agnimit, Project: cs411, Lines: 49, Source: nba.py

Example 11: __init__

    def __init__(self):

        pool = Pool(processes=2)
        self.graph = getGraph()

        files = findFiles(opts)

        self.progressQueue = Queue()
        reporter = Process(target=ProgressReport,
                           args=(self.progressQueue, len(files)))
        reporter.start()
        result = pool.map(self.cacheFile, enumerate(files), chunksize=5)
        self.progressQueue.put('END')
        log.info("finished, %s results", len(result))
        reporter.join()
Author: drewp, Project: photo, Lines: 15, Source: buildCache.py

Example 12: _cmd_start

    def _cmd_start(self, params):
        """
        Start the core in a different thread, monitor keystrokes in the main
        thread.

        :return: None
        """
        # Check if the console output plugin is enabled or not, and warn.
        output_plugins = self._w3af.plugins.get_enabled_plugins('output')
        if 'console' not in output_plugins:
            msg = "\nWarning: You disabled the console output plugin. If you"\
                  " start a new scan, the discovered vulnerabilities won\'t be"\
                  " printed to the console, we advise you to enable at least"\
                  " one output plugin in order to be able to actually see the"\
                  " the scan output."
            print msg

        # Note that I'm NOT starting this in a new multiprocess Process
        # please note the multiprocessing.dummy , this is required because
        # I want to start new threads inside this thread and there is a bug
        # with that http://bugs.python.org/issue10015
        self._scan_thread = Process(target=self._real_start)
        self._scan_thread.name = 'ConsoleScanThread'
        self._scan_thread.daemon = True
        self._scan_thread.start()
        
        # let the core thread start
        time.sleep(1)
        
        try:
            if self._w3af.status.get_status() != 'Not running.':
                self.show_progress_on_request()
        except KeyboardInterrupt:
            om.out.console('User pressed Ctrl+C, stopping scan.')
            self._w3af.stop()
Author: carriercomm, Project: w3af_analyse, Lines: 35, Source: rootMenu.py
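
Example 12 relies on the fact that a multiprocessing.dummy.Process is just a thread in the current interpreter, so the scan target can go on to create further threads as ordinary threading work. A small sketch of that point (an illustration for this article, not w3af code):

# Illustration only: the dummy Process target runs in the same process
# (same pid as the parent) and can freely start additional threads.
import os
import threading
from multiprocessing.dummy import Process

def target():
    print("target pid:", os.getpid())  # same pid as the parent
    inner = threading.Thread(target=lambda: print("inner thread ran"))
    inner.start()
    inner.join()

if __name__ == '__main__':
    print("parent pid:", os.getpid())
    p = Process(target=target, name='ConsoleScanThread')
    p.daemon = True
    p.start()
    p.join()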

Example 13: scoreDuplicates

def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0) :
    if num_cores < 2 :
        from multiprocessing.dummy import Process, Pool, Queue
        SimpleQueue = Queue
    else :
        from .backport import Process, Pool, SimpleQueue

    record_pairs_queue = SimpleQueue()
    score_queue =  SimpleQueue()
    result_queue = SimpleQueue()

    n_map_processes = max(num_cores-1, 1)
    score_records = ScoreRecords(data_model, classifier, threshold) 
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    [process.start() for process in map_processes]

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception) :
        raise ChildProcessError

    if result :
        scored_pairs_file, dtype = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype)
    else :
        scored_pairs = result

    reduce_process.join()
    [process.join() for process in map_processes]

    return scored_pairs
Author: Oge77, Project: dedupe, Lines: 42, Source: core.py

Example 14: scan_stop

def scan_stop(scan_id):
    """
    Stop a scan

    :param scan_id: The scan ID to stop
    :return: Empty result if success, 403 if the current state indicates that
             the scan can't be stopped.
    """
    scan_info = get_scan_info_from_id(scan_id)
    if scan_info is None:
        abort(404, 'Scan not found')

    if not scan_info.w3af_core.can_stop():
        abort(403, 'Scan cannot be stopped')

    t = Process(target=scan_info.w3af_core.stop, name='ScanStopThread', args=())
    t.daemon = True
    t.start()

    return jsonify({'message': 'Stopping scan'})
Author: foobarmonk, Project: w3af, Lines: 20, Source: scans.py

Example 15: stat_files

def stat_files():
	all_files = []
	for root, dirs, files in os.walk('/home/gzguoyubo/mf/tw2/res/entities/custom_type'):
		ignore = False
		for ig_path in ignore_paths:
			if ig_path in root:
				ignore = True
		if ignore:
			continue
		for fname in files:
			if not fname.endswith('.py'):
				continue
			abs_file_path = join(root, fname)
			all_files.append(abs_file_path)
	
	file_sections = []
	file_total_nums = len(all_files)
	for i in xrange(P_NUM):
		start = i * file_total_nums / P_NUM
		stop = start + file_total_nums / P_NUM
		if i == P_NUM - 1:
			stop = -1
		file_sections.append(all_files[start : stop])

	res_queue = Queue()
	processes = []
	for section in file_sections:
		p = Process(target=stat_file, args=(section, res_queue))
		p.start()
		processes.append(p)
	
	for p in processes:
		p.join()
	
	total_stats = defaultdict(int)
	while not res_queue.empty():
		stat = res_queue.get()
		for author, cnt in stat.iteritems():
			total_stats[author] += cnt
	
	print total_stats
Author: yubo1911, Project: saber, Lines: 41, Source: stat_codes.py


Note: The multiprocessing.dummy.Process class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are drawn from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and redistribution or use should follow the license of the corresponding project. Do not reproduce without permission.