This page collects typical usage examples of the Python method result.Result.get_crawlerConfig. If you are unsure how exactly to use Result.get_crawlerConfig in Python, or are looking for working examples of it, the curated code samples here may help. You can also explore further usage examples of result.Result, the class this method belongs to.
The section below shows 1 code example of the Result.get_crawlerConfig method; examples are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
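Before the full example, here is a minimal usage sketch of the method itself. It assumes a Result instance that has already been populated by a crawler run; the no-argument Result() constructor and the config fields printed (name, depth) are assumptions based on the wrapper methods shown in the example below, not confirmed API.

from result import Result

result = Result()  # constructor signature assumed for illustration
# Retrieve the crawler configuration that produced this result
crawler_config = result.get_crawlerConfig()
if crawler_config is not None:
    print(crawler_config.name, crawler_config.depth)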
Example 1: __init__
# Required module import: from result import Result [as alias]
# Or: from result.Result import get_crawlerConfig [as alias]
# Note: the elided portion of this class is assumed to also include "import socket" and "import jsonpickle", which send_result below requires.
#......... part of the code omitted here .........
        self.config.maxDepth = maxDepth

    # Set - Crawl name
    def set_crawl_name(self, name):
        self.config.name = name

    # Set - Crawl depth
    def set_crawl_depth(self, depth):
        self.config.depth = depth

    # Get - Crawl Option
    def get_crawl_options(self):
        return self.config.options

    # Add - Crawl Option
    # Option = key value pair <"Option", True/False>
    def add_crawl_option(self, option, value):
        self.config.options[option] = value

    # Get - Result object
    def get_result(self):
        return self.result

    # Get - Result data hash
    # Get the current hash of the data
    def get_result_dataHash(self):
        return self.result.get_dataHash()

    # Get - Result source
    def get_result_source(self):
        return self.result.get_source()

    # Set - Result source
    def set_result_source(self, source):
        self.result.set_source(source)

    # Get - Result data
    def get_result_data(self):
        return self.result.get_data()

    # Set - Result data
    def set_result_data(self, data):
        self.result.set_data(data)

    # Add - Result data
    def add_result_data(self, data):
        self.result.add_data(data)

    # Get - Result time start
    def get_result_timeStart(self):
        return self.result.get_timeStart()

    # Set - Result time start
    def set_result_timeStart(self, timeStart):
        self.result.set_timeStart(timeStart)

    # Get - Result time end
    def get_result_timeEnd(self):
        return self.result.get_timeEnd()

    # Set - Result time end
    def set_result_timeEnd(self, timeEnd):
        self.result.set_timeEnd(timeEnd)

    # Get - Result Crawler Configuration
    def get_result_crawlerConfig(self):
        return self.result.get_crawlerConfig()

    # Set - Result Crawler Configuration
    def set_result_crawlerConfig(self, crawlerConfig):
        self.result.set_crawlerConfig(crawlerConfig)

    # Get - Result Referrer
    def get_result_referrer(self):
        return self.result.get_referrer()

    # Set - Result Referrer
    def set_result_referrer(self, referrer):
        self.result.set_referrer(referrer)

    def send_result(self, result):
        # Send results to the parser
        # @todo test when there is a destination to send data to
        # @todo later goal implement ssl?
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        hostname = "parser"
        port = 443
        try:
            ser = jsonpickle.encode(result)
        except Exception:
            print("Encode failed... " + str(result))
            conn.close()
            return
        try:
            conn.connect((hostname, port))
            conn.sendall(ser.encode('utf-8'))
            print("Sent all data.")
        except Exception as e:
            print("Error sending data/connecting. Error: " + str(e))
        finally:
            conn.close()
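The example above only shows the sending side. For context, below is a minimal sketch of what the receiving "parser" endpoint could look like, assuming the same jsonpickle-over-TCP framing (one connection per Result, with the sender closing the connection once the payload is sent). The function name receive_result, the bind address, the port, and the buffer size are illustrative assumptions and not part of the original project.

import socket
import jsonpickle

def receive_result(host="0.0.0.0", port=443, bufsize=4096):
    # Accept one connection and read until the sender closes it,
    # then decode the jsonpickle payload back into a Python object.
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.bind((host, port))  # binding a low port may require elevated privileges
    srv.listen(1)
    conn, _addr = srv.accept()
    chunks = []
    try:
        while True:
            chunk = conn.recv(bufsize)
            if not chunk:  # sender closed the connection: payload complete
                break
            chunks.append(chunk)
    finally:
        conn.close()
        srv.close()
    return jsonpickle.decode(b"".join(chunks).decode("utf-8"))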