本文整理匯總了Python中custom.db.mysql.Custom_MySQL.delete_by_sql方法的典型用法代碼示例。如果您正苦於以下問題:Python Custom_MySQL.delete_by_sql方法的具體用法?Python Custom_MySQL.delete_by_sql怎麼用?Python Custom_MySQL.delete_by_sql使用的例子?那麼,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類 custom.db.mysql.Custom_MySQL 的用法示例。
在下文中一共展示了Custom_MySQL.delete_by_sql方法的1個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Python代碼示例。
示例1: run_task
# 需要導入模塊: from custom.db.mysql import Custom_MySQL [as 別名]
# 或者: from custom.db.mysql.Custom_MySQL import delete_by_sql [as 別名]
def run_task(self, task_param):
mysql = Custom_MySQL(using='hadoops2')
mysql_etl = Custom_MySQL(using='etl_manage')
redis = Custom_Redis(using='etl_task')
mysql.begin()
mysql_etl.begin()
datas = {'status': 0}
where = {'id': int(task_param['id'])}
task_key_exc = ""
try:
hive = Custom_Hive(using='ares_dw')
game = task_param['game']
platform = task_param['platform']
table_name = task_param['table_name']
log_date = task_param['log_date']
prefix_sql = task_param['prefix_sql']
exec_sql = task_param['exec_sql']
post_sql = task_param['post_sql']
date_cycle = task_param['date_cycle']
random_str = str(random.randint(0, 999999999))
stimes = str(int(time.time()))
task_date = task_param['task_date']
task_name = task_param['task_name']
do_rate = task_param['do_rate']
#task_key_tmp = str(game)+str(platform)+str(task_name)+str(date_cycle)+str(do_rate)+str(log_date)+str(task_date)
task_key_tmp = str(task_name)+str(date_cycle)+str(do_rate)+str(log_date)+str(task_date)
task_key_md5 = hashlib.md5()
task_key_md5.update(task_key_tmp)
task_key_md5_result = task_key_md5.hexdigest()
task_key = "dm2report_new_"+str(game)+"_"+str(platform)+"_"+str(task_key_md5_result)
task_key_exc = task_key
'''
celery 本身的機製存在漏洞,會將一個已經完成任務再次分配給其他的worker,致使同一個任務執行多次
為防止此種現象,在任務開始執行時,將任務的“唯一標示”寫入redis中,標注已執行
'''
#如果task_key is None, 則表示該條任務沒有執行過,正常執行即可
#如果task_key = 0, 則表示該條任務上次執行失敗,允許重複執行
if redis.get(task_key) == "0" or redis.get(task_key) is None:
tmp_file_dir = "/tmp/tmp/%s/%s/%s" % (game, platform, log_date)
#創建本地目錄
if not os.path.exists(tmp_file_dir):
os.makedirs(tmp_file_dir)
tmp_file = "%s/%s_%s_%s_%s.txt" % (tmp_file_dir, table_name, date_cycle, random_str, stimes)
hql_conf = "SET hive.support.concurrency=false;" \
"SET hive.exec.compress.output=true;" \
"SET mapred.output.compression.codec=com.hadoop.compression.lzo.LzopCodec; "
'''
將任務標示為開始執行:1
'''
datas['status'] = 1
datas['start_time'] = str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
mysql_etl.update('dm2report_new_log', ' id = %(id)d' % where, **datas)
mysql_etl.commit()
#在redis標注 任務正在執行
redis.set(task_key, 1)
#執行前置sql,將數據臨時寫入本地,用於mysql load數據
if prefix_sql is not None:
result = mysql.delete_by_sql(prefix_sql)
logger.info('exec prefix_sql: delete old data {0}'.format(result['output']))
if result['status'] != 0:
logger.error('Error Code %s : %s Cmd: %s' % (result['status'], result['output'], prefix_sql))
'''
執行失敗,將其狀態標為未執行:0
'''
datas['status'] = 0
#在redis標注 任務未開始執行
redis.set(task_key, 0)
else:
'''
將任務標示為刪除臨時文件完成:2
'''
datas['status'] = 2
datas.pop('start_time')
mysql_etl.update('dm2report_new_log', ' id = %(id)d' % where, **datas)
mysql_etl.commit()
'''
開始執行hive ql,將數據dump到本地
'''
result = hive.dump(hql_conf+exec_sql, tmp_file)
logger.info('exec exec_sql: dump data {0}'.format(result['output']))
if result['status'] != 0 or False == os.path.exists('%s' % tmp_file):
logger.error('Error Code %s : %s Cmd: %s' % (result['status'], result['output'], exec_sql))
#在redis標注 任務未開始執行
#.........這裏部分代碼省略.........