本文整理汇总了Python中pydblite.Base.delete方法的典型用法代码示例。如果您正苦于以下问题:Python Base.delete方法的具体用法?Python Base.delete怎么用?Python Base.delete使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pydblite.Base
的用法示例。
在下文中一共展示了Base.delete方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: do_Delete
# 需要导入模块: from pydblite import Base [as 别名]
# 或者: from pydblite.Base import delete [as 别名]
def do_Delete (self, result, request, args):
    """Delete the record whose testId matches args['testId'].

    Responds 404 (body: the empty query result) when no record matches;
    otherwise responds 200 with the deleted record and commits the deletion.

    result  -- Twisted deferred-chain value (unused here, overwritten below in the original)
    request -- HTTP request object; gets its response code set
    args    -- dict of request parameters; must contain a numeric 'testId'
    """
    ####### Replace this section by your logic #######
    db = Base('database_service6.pdl')
    # mode="open" reuses the existing database file instead of recreating it
    db.create('testId', 'testMessage', mode="open")
    matches = db(testId=int(args['testId']))
    if len(matches) == 0:
        responseCode = 404  # ResponseCode.NotFound (original comment wrongly said Ok)
        responseBody = json.dumps(matches, sort_keys=True, indent=4, separators=(',', ': '))
    else:
        responseCode = 200  # ResponseCode.Ok
        responseBody = json.dumps(matches[0], sort_keys=True, indent=4, separators=(',', ': '))
        db.delete(matches[0])
        db.commit()
    ####### Replace this section by your logic #######
    request.setResponseCode(responseCode)
    resp = utils.serviceResponse(responseCode, responseBody)
    return resp
示例2: YahoourlsearcherPipeline
# 需要导入模块: from pydblite import Base [as 别名]
# 或者: from pydblite.Base import delete [as 别名]
class YahoourlsearcherPipeline(object):
    """Scrapy item pipeline that appends scraped URLs to a session log file
    and keeps a deduplicated (by url) copy of them in a pydblite database."""

    def open_spider(self, spider):
        """Open the session log and the URL database when the spider starts."""
        filename = "urls_log.txt"
        # 'a+' then truncate(): each session starts with an empty log file
        self.log_target = codecs.open(filename, 'a+', encoding='utf-8')
        self.log_target.truncate()
        self.db = Base('URL_database.pdl')
        # mode="open" reuses the existing database file if present
        self.db.create('url', 'date', mode="open")
        self.log_target.write("***New url scraping session started at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        print("***New url scraping session started at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        self.log_target.write("*** Total url in the Database BEFORE new search: "+ str(len(self.db)) + " ***" + "\n")
        # run spider_closed() for the final dedup/statistics pass
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def process_item(self, item, spider):
        """Persist each scraped item to the database and mirror it to the log."""
        self.db.insert(url=item['url'],
                       date=item['date']
                       )
        self.log_target.write(item['url'] + " " + item['date'] + "\n")
        self.db.commit()
        return item

    def spider_closed(self, spider):
        """Deduplicate the database by url and write session statistics."""
        url_structure = []
        print ("End of database")
        i = 0  # fixed: was initialized to 1, over-counting the records by one
        for r in self.db:
            #print (str(r["url"]) + " " + str(r["date"]) + " \n")
            url_structure.append(url_date(r["url"],r["date"]))
            i += 1
        print (str(i) + "Url in the DB \n")
        self.log_target.write("Session ends at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + "\n")
        print ("Session ends at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + "\n")
        self.log_target.write("*** Total url in the Database AFTER the search: "+ str(len(self.db)) + " ***" + "\n")
        print ("Elementi presenti nel database: "+ str(len(self.db)) + " in struttura: " + str(len(url_structure)))
        # wipe the database: collect every record, then delete them in one call
        all_record = []
        for r in self.db:
            all_record.append(r)
        self.db.delete(all_record)
        print ("Elementi presenti nel database: "+ str(len(self.db)))
        # dedupe here: dict comprehension keeps the LAST entry seen per url
        url_structure = {x.url: x for x in url_structure}.values()
        for any_url in url_structure:
            # positional insert: values map to the ('url', 'date') field order
            self.db.insert(any_url.url, any_url.date)
        print ("Elementi presenti nel database: "+ str(len(self.db)))
        self.db.commit()
        self.log_target.write("--- After SET operation: "+ str(len(self.db)) + " --- " + "\n" + "\n" + "\n" + "\n")
        self.log_target.close()