This article collects typical usage examples of the Python method visualizer.Visualizer.generate_html. If you are unsure what Visualizer.generate_html does or how to call it, the code samples below may help. You can also look further into the class visualizer.Visualizer, which provides this method.
The following shows 5 code examples of Visualizer.generate_html, sorted by popularity by default.
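All five examples invoke the method the same way: they save the crawled automaton to a JSON file and then pass the 'web' template name together with the full path of that file to Visualizer.generate_html. Below is a minimal sketch of that call; the output directory is a hypothetical placeholder standing in for the path the examples derive from config.get_path('root'):

import os
from visualizer import Visualizer

# Hypothetical output directory; the examples below build it from their configuration object.
output_root = '/tmp/crawler_output'
# Render the saved automaton JSON into an HTML report using the 'web' template.
Visualizer.generate_html('web', os.path.join(output_root, 'automata.json'))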
Example 1: debugTestMain
# Required import: from visualizer import Visualizer [as alias]
# Or: from visualizer.Visualizer import generate_html [as alias]
def debugTestMain(folderpath, dirname):
logging.info(" setting config...")
config = SeleniumConfiguration(Browser.FireFox, "http://140.112.42.145:2000/demo/nothing/main.html")
config.set_max_depth(1)
config.set_max_length(5)
config.set_trace_amount(1)
config.set_max_states(100)
config.set_folderpath(folderpath)
config.set_dirname(dirname)
config.set_automata_fname('automata.json')
config.set_traces_fname('traces.json')
config.set_frame_tags(['iframe'])
config.set_dom_inside_iframe(True)
config.set_simple_clickable_tags()
config.set_simple_inputs_tags()
config.set_simple_normalizers()
logging.info(" setting executor...")
executor = SeleniumExecutor(config.get_browserID(), config.get_url())
logging.info(" setting crawler...")
automata = Automata(config)
databank = InlineDataBank("140.112.42.145:2000", "jeff", "zj4bj3jo37788", "test")
algorithm = MonkeyCrawler() #DFScrawler()
crawler = SeleniumCrawler(config, executor, automata, databank, algorithm)
logging.info(" crawler start run...")
crawler.run_algorithm()
logging.info(" end! save automata...")
algorithm.save_traces()
automata.save_automata(config.get_automata_fname())
Visualizer.generate_html('web', os.path.join(config.get_path('root'), config.get_automata_fname()))
config.save_config('config.json')
Example 2: SeleniumMain
# Required import: from visualizer import Visualizer [as alias]
# Or: from visualizer.Visualizer import generate_html [as alias]
def SeleniumMain(web_submit_id, folderpath=None, dirname=None):
logging.info(" connect to mysql")
print("connect to sql")
databank = MysqlDataBank("localhost", "root", "", "test")
url, deep, time = databank.get_websubmit(web_submit_id)
logging.info(" setting config...")
print(" setting config...")
config = SeleniumConfiguration(Browser.PhantomJS, url, folderpath, dirname)
config.set_max_depth(deep)
config.set_max_time(int(time)*60)
config.set_simple_clickable_tags()
config.set_simple_inputs_tags()
config.set_simple_normalizers()
config.set_frame_tags(['iframe'])
logging.info(" setting executor...")
executor = SeleniumExecutor(config.get_browserID(), config.get_url())
logging.info(" setting crawler...")
automata = Automata()
crawler = SeleniumCrawler(config, executor, automata, databank)
logging.info(" crawler start run...")
automata = crawler.run()
crawler.close()
logging.info(" end! save automata...")
automata.save_automata(config)
automata.save_traces(config)
Visualizer.generate_html('web', os.path.join(config.get_path('root'), config.get_automata_fname()))
config.save_config('config.json')
Example 3: run_algorithm
# Required import: from visualizer import Visualizer [as alias]
# Or: from visualizer.Visualizer import generate_html [as alias]
def run_algorithm(self):
    # repeat for trace_amount times
    for i in range(self.configuration.get_trace_amount()):
        self.initial()
        while self.action_events:
            # check time
            if (time.time() - self.time_start) > self.configuration.get_max_time():
                logging.info("|||| TIME OUT |||| end crawl ")
                break
            string = ''.join([str(action['action']['clickable'].get_id()) + str(action['depth']) + str(action['state'].get_id()) for action in self.action_events])
            logging.info(' action_events : ' + string)
            state, action, depth = self.get_next_action()
            self.change_state(state, action, depth)
            edge = self.trigger_action(state, action, depth)
            self.update_states(state, edge, action, depth)
        self.close()
    self.algorithm.save_traces()
    self.automata.save_automata(self.configuration.get_automata_fname())
    Visualizer.generate_html('web', os.path.join(self.configuration.get_path('root'), self.configuration.get_automata_fname()))
    return self.automata
Example 4: CBTestMain
# Required import: from visualizer import Visualizer [as alias]
# Or: from visualizer.Visualizer import generate_html [as alias]
def CBTestMain(folderpath, dirname, web_submit_id):
    logging.info(" Type: Cross Browser Testing")
    logging.info(" connect to mysql")
    print("")
    print("connect to sql")
    databank = MysqlDataBank("localhost", "root", "", "test")
    url, deep, time, b1, b2 = databank.get_websubmit(int(web_submit_id))
    basic_browserID = str(b1)
    other_browserID = str(b2)
    depth = int(deep)
    logging.info(" A new CBT begins...")
    logging.info(" setting config...")
    config = SeleniumConfiguration(int(basic_browserID), int(other_browserID), url)
    # max 3
    config.set_max_depth(int(depth))
    # max 3
    config.set_max_length(int(depth))
    # should be 1
    config.set_trace_amount(1)
    # should be 100; no use?
    config.set_max_states(5)
    config.set_folderpath(folderpath)
    config.set_dirname(dirname)
    config.set_automata_fname('automata.json')
    config.set_traces_fname('traces.json')
    #config.set_frame_tags(['iframe'])
    config.set_dom_inside_iframe(True)
    config.set_simple_clickable_tags()
    config.set_simple_inputs_tags()
    config.set_simple_normalizers()
    logging.info(" setting executor...")
    # nothing here
    executor = CBTExecutor(config.get_browserID(), config.get_url())
    logging.info(" setting crawler...")
    automata = Automata(config)
    #databank = InlineDataBank("140.112.42.145:2000", "jeff", "zj4bj3jo37788", "test")
    databank = InlineDataBank("localhost", "B00901138", "R124249166", "test")
    print('start Cross Browser Testing...')
    # actually it's the CBT algorithm
    algorithm = CBTCrawler(int(other_browserID), url)
    crawler = SeleniumCrawler(config, executor, automata, databank, algorithm)
    logging.info(" crawler start running...")
    crawler.run_algorithm()
    print(" end! save automata...")
    logging.info(" end! save automata...")
    algorithm.save_traces()
    automata.save_automata(config.get_automata_fname())
    Visualizer.generate_html('web', os.path.join(config.get_path('root'), config.get_automata_fname()))
    config.save_config('config.json')
Example 5: SeleniumMutationTrace
# Required import: from visualizer import Visualizer [as alias]
# Or: from visualizer.Visualizer import generate_html [as alias]
def SeleniumMutationTrace(folderpath, dirname, config_fname, traces_fname, trace_id, method_id, modes):
logging.info(" loading config...")
config = load_config(config_fname)
config.set_folderpath(folderpath)
config.set_dirname(dirname)
config.set_mutation_trace(traces_fname, trace_id)
config.set_mutation_method(method_id)
config.set_mutation_modes(modes)
logging.info(" setting executor...")
executor = SeleniumExecutor(config.get_browserID(), config.get_url())
logging.info(" setting crawler...")
automata = Automata()
databank = MysqlDataBank("localhost", "B00901138", "R124249166", "test")
crawler = SeleniumCrawler(config, executor, automata, databank)
logging.info(" crawler start run...")
crawler.run_mutant()
logging.info(" end! save automata...")
automata.save_traces(config)
automata.save_automata(config)
Visualizer.generate_html('web', os.path.join(config.get_path('root'), config.get_automata_fname()))