This article collects typical code examples of the torndb.Connection.execute method in Python. If you are wondering what Connection.execute does, how to call it, or when to use it, the curated examples below should help. You can also read more about the enclosing class, torndb.Connection.
The following shows 15 code examples of Connection.execute, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python examples.
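Before the examples, here is a minimal, self-contained sketch of the typical call pattern. torndb forwards %s placeholders to the underlying MySQL driver, so values can be passed as extra arguments instead of being formatted into the SQL string; the host, credentials, and the articles table below are placeholders, not taken from any particular example.

from torndb import Connection

# Connect to MySQL (host, database name, user and password are placeholders).
db = Connection("127.0.0.1", "example_db", user="root", password="secret")

# execute() runs a statement and returns the id of the last inserted row.
new_id = db.execute(
    "INSERT INTO articles (`title`, `author`) VALUES (%s, %s)",
    "Hello torndb", "alice")

# query() returns a list of dict-like rows; placeholders keep the values escaped.
rows = db.query("SELECT * FROM articles WHERE id = %s", new_id)

db.close()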
Example 1: get
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def get(self, article_id):
    # template_name = "article_details.html"
    template_name = "mobile/article_details.html"
    db = Connection(settings.DATABASE_SERVER,
                    settings.DATABASE_NAME,
                    settings.DATABASE_USER,
                    settings.DATABASE_PASSWORD,
                    )
    sql = "SELECT * FROM articles WHERE id='{0}'".format(
        article_id)
    article = db.query(sql)[0]
    # article["read_count"], article["comment_count"] = \
    #     get_article_statistics(db, article_id)
    article["url"] = urllib.quote(article["url"])
    # Update article read count
    now = datetime.datetime.now()
    sql = """INSERT INTO article_reads (`article_id`, `user_id`, `time`)
             VALUES ('{0}', '{1}', '{2}')
          """.format(article_id, 0, now)
    db.execute(sql)
    kwargs = dict(article=article,
                  day=article["day"])
    super(ArticleDetailsHandler, self).render(
        template_name,
        **kwargs
    )
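Note that this handler interpolates article_id straight into the SQL string, which leaves it open to SQL injection. Since torndb passes %s placeholders through to the driver, a safer variant of the two statements (a sketch only, keeping the table and column names used above) would be:

# Parameterized lookup: the driver escapes article_id.
article = db.query("SELECT * FROM articles WHERE id = %s", article_id)[0]

# Parameterized insert; execute() returns the AUTO_INCREMENT id of the new row.
db.execute(
    "INSERT INTO article_reads (`article_id`, `user_id`, `time`) "
    "VALUES (%s, %s, %s)",
    article_id, 0, datetime.datetime.now())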
Example 2: log_dba_jobs_progress
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def log_dba_jobs_progress(v_job_id, v_cur_prog_desc, v_cur_cum_prog_desc, v_cur_prog_com_rate):
    db = Connection('/tmp/mysql3306.sock',
                    config.DB_NAME,
                    config.DB_USER,
                    config.DB_PASSWD,
                    time_zone='+8:00')
    v_add_job_progress_sql = '''insert into dba_job_progress(job_id,cur_prog_desc,cur_cum_prog_desc,cur_prog_com_rate) values(%d,'%s','%s',%d)''' % (
        v_job_id, v_cur_prog_desc, v_cur_cum_prog_desc, v_cur_prog_com_rate)
    db.execute(v_add_job_progress_sql.replace('%', '%%'))
    db.close()
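The .replace('%', '%%') escapes literal percent signs because torndb still hands the finished string to the driver's %-style substitution step even when no parameters are supplied. A sketch of the more direct alternative, letting torndb do the substitution itself:

# The driver handles quoting and escaping; no manual %-escaping needed.
db.execute(
    "insert into dba_job_progress"
    "(job_id, cur_prog_desc, cur_cum_prog_desc, cur_prog_com_rate) "
    "values (%s, %s, %s, %s)",
    v_job_id, v_cur_prog_desc, v_cur_cum_prog_desc, v_cur_prog_com_rate)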
Example 3: final_dba_job
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def final_dba_job(v_job_id):
    db = Connection('/tmp/mysql3306.sock',
                    config.DB_NAME,
                    config.DB_USER,
                    config.DB_PASSWD,
                    time_zone='+8:00')
    # Mark the job in the job queue as completed
    v_update_job_sql = '''update dba_jobs set status=1 where job_id=%d''' % (
        v_job_id)
    db.execute(v_update_job_sql.replace('%', '%%'))
    db.close()
Example 4: initial_dba_job
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def initial_dba_job(v_op_user, v_op_comment):
    db = Connection('/tmp/mysql3306.sock',
                    config.DB_NAME,
                    config.DB_USER,
                    config.DB_PASSWD,
                    time_zone='+8:00')
    v_add_job_sql = '''insert into dba_jobs(op_user,job_desc) values('%s','%s')''' % (
        v_op_user, v_op_comment)
    v_job_id = db.execute(v_add_job_sql.replace('%', '%%'))
    db.close()
    return v_job_id
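Example 4 relies on Connection.execute returning the cursor's lastrowid, which for an AUTO_INCREMENT key is the id of the newly inserted row; torndb also provides execute_rowcount when the number of affected rows is what matters. A minimal sketch against the same dba_jobs table:

# execute() is an alias for execute_lastrowid(): it returns the new AUTO_INCREMENT id.
job_id = db.execute("insert into dba_jobs(op_user, job_desc) values (%s, %s)",
                    v_op_user, v_op_comment)

# execute_rowcount() returns how many rows the statement changed.
updated = db.execute_rowcount("update dba_jobs set status=1 where job_id=%s", job_id)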
Example 5: post
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def post(self):
    title = self.get_argument("title", "")
    author = self.get_argument("author", "")
    category = self.get_argument("category", "")
    date = self.get_argument("date", "")
    profile = self.get_argument("profile", "")
    picUrl = self.get_argument("picUrl", "")
    url = self.get_argument("url", "")
    time = datetime.datetime.strptime(date, "%m/%d/%Y")
    day = {
        0: "Mon",
        1: "Tue",
        2: "Wed",
        3: "Thu",
        4: "Fri",
        5: "Sat",
        6: "Sun",
    }[time.weekday()]
    sql = """INSERT INTO articles (`title`, `author`, `day`, `time`, `url`, `profile`, `picUrl`, `category`)
             VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}');
          """.format(
        title,
        author,
        day,
        time,
        url,
        profile,
        picUrl,
        category
    )
    db = Connection(settings.DATABASE_SERVER,
                    settings.DATABASE_NAME,
                    settings.DATABASE_USER,
                    settings.DATABASE_PASSWORD,
                    )
    lastrowid = db.execute(sql)
    if lastrowid:
        self.redirect("/articles/details/{0}/".format(lastrowid))
    else:
        template_name = "/upload_error.html"
        self.render(template_name)
Example 6: upload_user_stats
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def upload_user_stats():
    # Connect to the central configuration database
    db = Connection('/tmp/mysql3306.sock',
                    config.DB_NAME,
                    config.DB_USER,
                    config.DB_PASSWD,
                    time_zone='+8:00')
    db.execute('truncate table meta_table_statistics')  # truncate first, then insert
    db.execute('truncate table meta_index_statistics')  # truncate first, then insert
    # The E4S server with id 64 is not managed by the platform, so it is excluded manually with b.id != 64
    v_sql = r"""SELECT b.ip,b.port,b.id as instance_id,a.id as schema_id,a.name as db_name from
        resources_schema a,tag b where b.online_flag=1 and a.owner=b.id and b.id !=64 order by a.id,b.id"""
    #print v_sql
    upload_tables_list = db.query(v_sql)
    if upload_tables_list:  # loop over the instance tables
        i = 0
        for upload_table in upload_tables_list:
            instance_id = upload_table['instance_id']
            schema_id = upload_table['schema_id']
            db_name = upload_table['db_name']
            host_ip = upload_table['ip']
            mysql_port = upload_table['port']
            v_host = host_ip + ':' + str(mysql_port)
            # Connect to the remote instance
            db_remote = Connection(v_host,
                                   'information_schema',
                                   config.DBA_QUERY_USER,
                                   config.DBA_QUERY_PASSWD,
                                   time_zone='+8:00')
            # Fetch the table metadata, then insert it
            v_pl = r"""SELECT table_schema,table_name,rows_read,rows_changed,rows_changed_x_indexes
                from table_statistics where table_schema='%s' """ % (db_name)
            #print v_pl
            table_list = db_remote.query(v_pl)
            for table_row in table_list:
                table_schema = table_row['table_schema']
                table_name = table_row['table_name']
                rows_read = table_row['rows_read']
                rows_changed = table_row['rows_changed']
                rows_changed_x_indexes = table_row['rows_changed_x_indexes']
                #try:
                v_insert_sql = '''insert into meta_table_statistics(instance_id,schema_id,table_schema,
                    table_name,rows_read,rows_changed,rows_changed_x_indexes)
                    values(%d,%d,'%s','%s',%d,%d,%d)''' % (
                    instance_id, schema_id, table_schema,
                    table_name, rows_read, rows_changed, rows_changed_x_indexes
                )
                #print v_insert_sql
                db.execute(v_insert_sql.replace('%', '%%'))
                # except Exception, e:
                #     print e.message
                #     print v_insert_sql
                #     text = "insert meta tables error!," + e.message + ',' + v_insert_sql
                #     log_w(text)
            # Fetch the index metadata, then insert it
#......... part of the code omitted here .........
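Example 6 inserts the statistics rows one at a time inside a nested loop. When the rows are collected first, torndb's executemany can send them in a single call; the sketch below assumes the meta_table_statistics columns shown above.

# Collect one tuple per table, then insert them in one round trip.
rows = [(instance_id, schema_id, t['table_schema'], t['table_name'],
         t['rows_read'], t['rows_changed'], t['rows_changed_x_indexes'])
        for t in table_list]
if rows:
    db.executemany(
        "insert into meta_table_statistics(instance_id, schema_id, table_schema, "
        "table_name, rows_read, rows_changed, rows_changed_x_indexes) "
        "values (%s, %s, %s, %s, %s, %s, %s)",
        rows)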
Example 7: upload_auto_increment
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
#......... part of the code omitted here .........
#vp_id = process_row['ID']
vp_table_schema = process_row['TABLE_SCHEMA']
vp_table_name = process_row['TABLE_NAME']
vp_column_name = process_row['COLUMN_NAME']
vp_data_type = process_row['DATA_TYPE']
# If the value is empty, turn it into MySQL NULL; otherwise wrap it in quotes before passing it on, using the format %s rather than '%s'
#if vp_db is None:
# vp_db = 'NULL'
#else:
# vp_db = "'"+vp_db+"'"
#print vp_db
vp_column_type = process_row['COLUMN_TYPE']
vp_max_value = process_row['MAX_VALUE']
vp_auto_increment = process_row['AUTO_INCREMENT']
vp_auto_increment_ratio = process_row['AUTO_INCREMENT_RATIO']
#vp_info = process_row['INFO']
#if vp_info is None:
# vp_info = 'NULL'
#else:
# #vp_info = "'"+vp_info+"'"
# vp_info = vp_info.replace('"',"'") # replace double quotes with single quotes
# vp_info = '"'+vp_info+'"' # guard against single quotes inside the string itself
# v_insert_sql='''insert into log_processlist(instance_id,TID,USER,HOST,DB,COMMAND,TIME,STATE,INFO,
# TIME_MS,ROWS_SENT,ROWS_EXAMINED) values(%d,%d,'%s','%s','%s','%s',%d,'%s','%s',%d,%d,%d)''' % (
# instance_id,vp_id,vp_user,vp_host,vp_db,vp_command,vp_time,vp_state,vp_info,vp_time_ms,vp_rows_sent,vp_rows_examined)
#try:
v_delete_sql='''delete from auto_increment_list where INSTANCE_ID = '%s' and TABLE_SCHEMA = '%s' and TABLE_NAME = '%s' and COLUMN_NAME = '%s' ''' % (
vp_instance_id,vp_table_schema,vp_table_name,vp_column_name)
print v_delete_sql
db.execute(v_delete_sql)
v_insert_sql='''insert into auto_increment_list(instance_id,server_ip,TABLE_SCHEMA,TABLE_NAME,COLUMN_NAME,DATA_TYPE,COLUMN_TYPE,MAX_VALUE,AUTO_INCREMENT,AUTO_INCREMENT_RATIO)
values(%d,'%s','%s','%s','%s','%s','%s',%d,%d,%d)''' % (
vp_instance_id,vp_server_ip,vp_table_schema,vp_table_name,vp_column_name,vp_data_type,vp_column_type,int(vp_max_value),int(vp_auto_increment),int(vp_auto_increment_ratio))
print v_insert_sql
db.execute(v_insert_sql)
#db.execute(v_insert_sql)
# except Exception, e:
# print e.message
# print v_insert_sql
# text = "insert process_list error!," + e.message + ',' + v_insert_sql
# log_w(text)
db_remote.close()
i=i+1
v_sendmail_sql=r'''select count(*) count
from auto_increment_list b where AUTO_INCREMENT_RATIO >= 80 '''
if_list = db.query(v_sendmail_sql)
for if_row in if_list:
v_if = if_row['count']
print v_if
if v_if >= 1 :
print '11'
v_warn_sql=r'''select SERVER_IP,TABLE_SCHEMA,TABLE_NAME,COLUMN_NAME,MAX_VALUE,AUTO_INCREMENT,AUTO_INCREMENT_RATIO
from auto_increment_list b where AUTO_INCREMENT_RATIO >= 80 '''
print v_warn_sql
warn_list = db.query(v_warn_sql)
v_server_ip = '\r\n'
for warn_row in warn_list:
v_server_ip = v_server_ip + warn_row['SERVER_IP'] +'|对象名:' + warn_row['TABLE_SCHEMA'] +'|表名:'+ warn_row['TABLE_NAME'] + '|字段名:' + warn_row['COLUMN_NAME']+'\r\n'
print v_server_ip
v_msg_text = v_server_ip
v_receiver = '[email protected],[email protected]'
v_subject = '有快溢出的自增ID,细节请去监控页面查看'
v_return = func.send_mail_to_devs(v_receiver,v_subject,v_msg_text)
db.close()
Example 8: upload_processlist
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def upload_processlist():
#Connect to the central configuration database
db = Connection('/tmp/mysql3306.sock',
config.DB_NAME,
config.DB_USER,
config.DB_PASSWD,
time_zone='+8:00')
v_sql = r"""SELECT id,ip,port from tag b where online_flag=1 and is_showprocesslist=1"""
#print v_sql
upload_server_list = db.query(v_sql)
if upload_server_list: # loop over the instance list
i=0
for upload_server in upload_server_list:
instance_id = upload_server['id']
host_ip = upload_server['ip']
mysql_port = upload_server['port']
v_host =host_ip + ':' + str(mysql_port)
#Connect to the remote instance
db_remote = Connection(v_host,
'information_schema',
config.DBA_QUERY_USER,
config.DBA_QUERY_PASSWD,
time_zone='+8:00')
v_pl = r"""SELECT ID,USER,HOST,DB,COMMAND,TIME,STATE,INFO,TIME_MS,ROWS_SENT,ROWS_EXAMINED from PROCESSLIST"""
#print v_pl
process_list = db_remote.query(v_pl)
for process_row in process_list:
vp_id = process_row['ID']
vp_user = process_row['USER']
vp_host = process_row['HOST']
vp_db = process_row['DB']
# If the value is empty, turn it into MySQL NULL; otherwise wrap it in quotes before passing it on, using the format %s rather than '%s'
if vp_db is None:
vp_db = 'NULL'
else:
vp_db = "'"+vp_db+"'"
#print vp_db
vp_command = process_row['COMMAND']
vp_time = process_row['TIME']
vp_state = process_row['STATE']
vp_info = process_row['INFO']
if vp_info is None:
vp_info = 'NULL'
else:
#vp_info = "'"+vp_info+"'"
vp_info = vp_info.replace('"',"'") # replace double quotes with single quotes
vp_info = '"'+vp_info+'"' # guard against single quotes inside the string itself
vp_time_ms = process_row['TIME_MS']
vp_rows_sent = process_row['ROWS_SENT']
vp_rows_examined = process_row['ROWS_EXAMINED']
# v_insert_sql='''insert into log_processlist(instance_id,TID,USER,HOST,DB,COMMAND,TIME,STATE,INFO,
# TIME_MS,ROWS_SENT,ROWS_EXAMINED) values(%d,%d,'%s','%s','%s','%s',%d,'%s','%s',%d,%d,%d)''' % (
# instance_id,vp_id,vp_user,vp_host,vp_db,vp_command,vp_time,vp_state,vp_info,vp_time_ms,vp_rows_sent,vp_rows_examined)
#try:
v_insert_sql='''insert into log_processlist(instance_id,TID,USER,HOST,DB,COMMAND,TIME,STATE,INFO,
TIME_MS,ROWS_SENT,ROWS_EXAMINED) values(%d,%d,'%s','%s',%s,'%s',%d,'%s',%s,%d,%d,%d)''' % (
instance_id,vp_id,vp_user,vp_host,vp_db,vp_command,vp_time,vp_state,vp_info,vp_time_ms,vp_rows_sent,vp_rows_examined)
#print v_insert_sql
db.execute(v_insert_sql.replace('%','%%'))
#db.execute(v_insert_sql)
# except Exception, e:
#......... part of the code omitted here .........
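The manual NULL and quote handling above (substituting the literal string 'NULL', wrapping values in quotes, and rewriting double quotes) is what parameter substitution does automatically: the driver renders Python None as SQL NULL and escapes embedded quotes. A sketch of the same insert with placeholders, assuming the log_processlist table shown above:

# None becomes SQL NULL; quotes inside INFO are escaped by the driver.
db.execute(
    "insert into log_processlist(instance_id, TID, USER, HOST, DB, COMMAND, "
    "TIME, STATE, INFO, TIME_MS, ROWS_SENT, ROWS_EXAMINED) "
    "values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
    instance_id, vp_id, vp_user, vp_host, process_row['DB'], vp_command,
    vp_time, vp_state, process_row['INFO'], vp_time_ms, vp_rows_sent, vp_rows_examined)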
Example 9: Connection
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
# -*- coding: utf-8 -*-
from torndb import Connection
db = Connection('127.0.0.1', 'mysql', 'root')
try:
    db.execute('create database `car_spider`;')
    print 'create database success.'
except:
    print 'database exists.'
    pass
car_info_sql = """
CREATE TABLE `car_spider`.`car` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `logo` varchar(200) NOT NULL,
  `model` varchar(255),
  `befor_price` varchar(100),
  `after_price` int(12),
  `plan` varchar(500),
  `Purchased` int(12),
  `link` varchar(255) default "",
  `created` datetime,
  `updated` datetime,
  `status` varchar(10),
  PRIMARY KEY (`id`),
  INDEX `idx_dt` (created)
) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
"""
Example 10: __init__
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
class Database:
"""docstring for ClassName"""
def __init__(
self, master_host,from_host, to_host, db_ip_priv,
os_user,os_password_source,os_password_target,os_password_priv,
db_user,db_user_pwd_source,db_user_pwd_target, db_user_pwd_priv,
db_port_master,db_port_source, db_port_target,db_port_priv,
db_user_name_rep,db_rep_pwd,
db_root_user,db_root_pwd_target):
self.master_host = master_host
self.from_host = from_host
self.to_host = to_host
self.db_ip_priv = db_ip_priv
self.port = 22 #ssh port
self.os_user = os_user
self.os_password_source = os_password_source
self.os_password_target = os_password_target
self.os_password_priv = os_password_priv
self.db_user_name = db_user
self.db_user_pwd_source = db_user_pwd_source
self.db_user_pwd_target = db_user_pwd_target
self.db_user_pwd_priv = db_user_pwd_priv
self.db_port_master = db_port_master
self.db_port_source = db_port_source
self.db_port_target = db_port_target
self.db_port_priv = db_port_priv
self.db_user_name_rep = db_user_name_rep
self.db_rep_pwd = db_rep_pwd
self.db_root_user = db_root_user
self.db_root_pwd_target = db_root_pwd_target
self.today = datetime.date.today().strftime("%Y%m%d")
self.xtra_time = datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')
self.xtrabackup_bin_path = config.xtrabackup_bin_path
self.xtrabackup_export_path = config.xtrabackup_export_path
#self.xtrabackup_restore_path = config.xtrabackup_restore_path
self.xtrabackup_restore_path = config.mysql_datadir_path
self.mysql_client_path = config.mysql_client_path
self.mydumper_bin_path = config.mydumper_bin_path
self.mydumper_export_path = config.mydumper_export_path
self.mydumper_import_path = config.mydumper_import_path
self.db = Connection('/tmp/mysql3306.sock',
config.DB_NAME,
config.DB_USER,
config.DB_PASSWD,
time_zone='+8:00')
def export_database_metadata(self,v_prosess_id): # export the database's table structures, views, and stored procedures
text = "%s %s" % (
datetime.datetime.now(),
"One: Begin export master Database table stru,views,procs, Please wait ....")
print "\033[1;32;40m%s\033[0m" % text # 绿色
log_w(text)
v_db_socket='--socket=/tmp/mysql'+str(self.db_port_source)+'.sock'
try:
s = paramiko.SSHClient()
s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
s.connect(self.from_host, self.port, self.os_user, self.os_password_source)
# Get the list of databases that mysqldump should export
# Do not add another grep -v here, because the logplatform database would be filtered out as well
conm_db_list_1 = r'''%s/mysql -N -u%s %s -e"show databases"|grep -v information_schema|grep -v common_schema|grep -v performance_schema|grep -v mysql|tr "\n" " "''' % (
self.mysql_client_path,self.db_user_name,v_db_socket)
v_update_sql = '''update dba_job_progress set cur_prog_shell_cmd = ' ''' + conm_db_list_1 +''' ' where id= '''+ str(v_prosess_id)
print conm_db_list_1
print v_update_sql
self.db.execute(v_update_sql)
conm_db_list = r'''%s/mysql -N -u%s -p'%s' %s -e"show databases"|grep -v information_schema|grep -v common_schema|grep -v performance_schema|grep -v mysql|tr "\n" " "''' % (
self.mysql_client_path,self.db_user_name,self.db_user_pwd_source,v_db_socket)
print conm_db_list
stdin, stdout, stderr = s.exec_command(conm_db_list)
if stdout.channel.recv_exit_status() ==0:
#......... part of the code omitted here .........
Example 11: start_schedule
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def start_schedule():
#if __name__ == '__main__':
os_user = config.OS_USER
os_password = config.OS_APPS_PASSWD
scheduler = Scheduler(daemonic = False)
scheduler.print_jobs()
#scheduler.remove_jobstore('file',close=True)
#scheduler.shutdown(wait=False)
scheduler.shutdown()
#scheduler.unschedule_func(backup)
scheduler.add_jobstore(ShelveJobStore('/tmp/db_schedule'), 'file')
v_current_jobs = scheduler.get_jobs()
print v_current_jobs
if v_current_jobs: # if jobs already exist, remove them first
scheduler.unschedule_func(backup)
#scheduler = Scheduler(standalone=True)
#scheduler = Scheduler(daemon=True)
#Connect to the central configuration database to get the backup interval and related settings
db = Connection('/tmp/mysql3306.sock',
config.DB_NAME,
config.DB_USER,
config.DB_PASSWD,
time_zone='+8:00')
v_sql = r"""SELECT a.instance_id,b.ip,b.port,a.backup_interval_type,a.backup_start_time from mysql_ins_bak_setup a,tag b where
a.instance_id=b.id """
print v_sql
bak_server_list = db.query(v_sql)
if bak_server_list: # there are servers that need to be configured
i=0
# Mark scheduled tasks that have not started yet as manually ended: backup_result_type=4
v_manual_end_sql = 'update mysql_ins_bak_log set backup_result_type=4 where backup_result_type=0'
db.execute(v_manual_end_sql)
for bak_server in bak_server_list:
instance_id = bak_server['instance_id']
from_host = bak_server['ip']
#print from_host
mysql_port = bak_server['port']
backup_interval_type = bak_server['backup_interval_type']
backup_start_time = bak_server['backup_start_time']
str_start_date= time.strftime("%Y-%m-%d") + ' ' + backup_start_time
print str_start_date
if backup_interval_type == 1: # every day
#in-memory jobstore
#scheduler.add_interval_job(backup, days=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password])
#file jobstore: jobstore='file'
scheduler.add_interval_job(backup, days=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password], jobstore='file')
#scheduler.add_interval_job(backup, days=1, start_date='2014-07-18 18:17:01', args=[from_host, mysql_port, os_user, os_password])
elif backup_interval_type == 2: # every week
scheduler.add_interval_job(backup, weeks=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password])
elif backup_interval_type == 3: # every hour
scheduler.add_interval_job(backup, hours=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password])
# Start recording the backup scheduling status in the database; 0: the scheduled task has started, the actual backup has not yet begun
#......... part of the code omitted here .........
Example 12: add_schedule_backup_job
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def add_schedule_backup_job():
# if __name__ == '__main__':
os_user = config.OS_USER
os_password = config.OS_APPS_PASSWD
scheduler = BackgroundScheduler() # defaults to the in-memory jobstore
url = "sqlite:////home/apps/dbajob.sqlite"
scheduler.add_jobstore("sqlalchemy", url=url, alias="sqlite_js")
scheduler.print_jobs()
print "a"
scheduler.remove_all_jobs(jobstore="sqlite_js")
scheduler.print_jobs()
print "remove"
# v_current_jobs = scheduler.get_jobs()
# print v_current_jobs
# if v_current_jobs: # if jobs already exist, remove them first
# scheduler.remove_job('backup')
# Connect to the central configuration database to get the backup interval and related settings
db = Connection("/tmp/mysql3306.sock", config.DB_NAME, config.DB_USER, config.DB_PASSWD, time_zone="+8:00")
v_sql = r"""SELECT a.instance_id,b.ip,b.port,a.backup_interval_type,a.backup_start_time from mysql_ins_bak_setup a,tag b where
a.instance_id=b.id """
print v_sql
bak_server_list = db.query(v_sql)
if bak_server_list: # there are servers that need to be configured
i = 0
# Mark scheduled tasks that have not started yet as manually ended: backup_result_type=4
v_manual_end_sql = "update mysql_ins_bak_log set backup_result_type=4 where backup_result_type=0"
db.execute(v_manual_end_sql)
for bak_server in bak_server_list:
instance_id = bak_server["instance_id"]
from_host = bak_server["ip"]
# print from_host
mysql_port = bak_server["port"]
backup_interval_type = bak_server["backup_interval_type"]
backup_start_time = bak_server["backup_start_time"]
str_start_date = time.strftime("%Y-%m-%d") + " " + backup_start_time
print str_start_date
v_job_id = "backup_%s_%s" % (from_host, str(mysql_port))
if backup_interval_type == 1: # every day
# scheduler.add_interval_job(backup, days=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password], jobstore='file')
scheduler.add_job(
backup,
"interval",
id=v_job_id,
days=1,
start_date=str_start_date,
args=[from_host, mysql_port, os_user, os_password],
replace_existing=True,
jobstore="sqlite_js",
)
elif backup_interval_type == 2: # every week weeks=1
scheduler.add_job(
backup,
"interval",
id=v_job_id,
weeks=1,
start_date=str_start_date,
args=[from_host, mysql_port, os_user, os_password],
replace_existing=True,
jobstore="sqlite_js",
)
elif backup_interval_type == 3: # every hour hours=1
scheduler.add_job(
#......... part of the code omitted here .........
Example 13: add_single_backup_job
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
#......... part of the code omitted here .........
scheduler.add_jobstore("sqlalchemy", url=url, alias="sqlite_js")
scheduler.print_jobs()
print "a"
# Connect to the central configuration database to get the backup interval and related settings
db = Connection("/tmp/mysql3306.sock", config.DB_NAME, config.DB_USER, config.DB_PASSWD, time_zone="+8:00")
v_sql = r"""SELECT a.instance_id,b.ip,b.port,a.backup_interval_type,a.backup_start_time from mysql_ins_bak_setup a,tag b where
a.instance_id=b.id and a.id=%d""" % (
v_setup_id
)
print v_sql
bak_server = db.get(v_sql)
instance_id = bak_server["instance_id"]
from_host = bak_server["ip"]
# print from_host
mysql_port = bak_server["port"]
backup_interval_type = bak_server["backup_interval_type"]
backup_start_time = bak_server["backup_start_time"]
str_start_date = time.strftime("%Y-%m-%d") + " " + backup_start_time
print str_start_date
v_job_id = "backup_%s_%s" % (from_host, str(mysql_port))
if backup_interval_type == 1: # every day
# scheduler.add_interval_job(backup, days=1, start_date=str_start_date, args=[from_host, mysql_port, os_user, os_password], jobstore='file')
scheduler.add_job(
backup,
"interval",
id=v_job_id,
days=1,
start_date=str_start_date,
args=[from_host, mysql_port, os_user, os_password],
replace_existing=True,
jobstore="sqlite_js",
)
elif backup_interval_type == 2: # every week weeks=1
scheduler.add_job(
backup,
"interval",
id=v_job_id,
weeks=1,
start_date=str_start_date,
args=[from_host, mysql_port, os_user, os_password],
replace_existing=True,
jobstore="sqlite_js",
)
elif backup_interval_type == 3: # every hour hours=1
scheduler.add_job(
backup,
"interval",
id=v_job_id,
hours=1,
start_date=str_start_date,
args=[from_host, mysql_port, os_user, os_password],
replace_existing=True,
jobstore="sqlite_js",
)
else:
pass
scheduler.print_jobs()
print "b"
scheduler.start()
scheduler.print_jobs()
print "c"
# Start recording the backup scheduling status in the database; 0: the scheduled task has started, the actual backup has not yet begun
v_sche_start_sql = """insert into mysql_ins_bak_log(instance_id,backup_result_type)
values(%d,0)""" % (
instance_id
)
db.execute(v_sche_start_sql)
db.close()
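Example 13 uses db.get(v_sql) rather than db.query: torndb's get returns a single dict-like row, returns None when there is no match, and raises an error if the query yields more than one row, which suits a lookup by a unique id. A small sketch with v_setup_id passed as a parameter instead of being formatted in:

# get() returns one row or None; it raises if multiple rows match.
bak_server = db.get(
    "SELECT a.instance_id, b.ip, b.port, a.backup_interval_type, a.backup_start_time "
    "FROM mysql_ins_bak_setup a, tag b WHERE a.instance_id = b.id AND a.id = %s",
    v_setup_id)
if bak_server is None:
    print "no backup setup found for id", v_setup_id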
Example 14: __init__
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
class MySQLHelper:
    """docstring for ClassName"""
    def __init__(self, host, user, password, database):
        self._db = Connection(host, database, user=user, password=password, max_idle_time=10)
        if not self._db._db:
            raise Exception('%s' % self._db.error_msg)
    def query(self, sql, *parameters):
        return self._db.query(sql, *parameters)
    def query_one(self, sql, *parameters):
        res = self._db.query(sql, *parameters)
        return res.pop() if res else {}
    def write(self, sql, *parameters):
        return self._db.execute(sql, *parameters)
    def gen_insert(self, tablename, rowdict, replace=False):
        return self._db.gen_insert_sql(tablename, rowdict, replace)
    def insert_dict(self, tablename, rowdict):
        key_strs = ", ".join(["""`%s`""" % key for key in rowdict.keys()])
        value_strs = ", ".join(["""'%s'""" % rowdict.get(key) for key in rowdict.keys()])
        sql = """INSERT INTO %s (%s) VALUES (%s)""" % (tablename, key_strs, value_strs)
        return self._db.execute(sql)
    def insert_batch(self, tablename, batch_params):
        value_batch = []
        for param in batch_params:
            keys = param.keys()
            key_strs = ", ".join(["""`%s`""" % key for key in keys])
            value_strs = "(%s)" % ", ".join(
                ["""'%s'""" % "%s" % param.get(key) for key in keys])
            value_batch.append(value_strs)
        sql = """INSERT INTO %s (%s) VALUES %s""" % (tablename, key_strs, ",".join(value_batch))
        return self._db.execute(sql)
    def update_dict(self, tablename, rowdict, where):
        sql = """UPDATE %s SET %s WHERE %s""" % (
            tablename, self._formatter(rowdict, ', '), self._formatter(where, " AND "))
        return self._db.execute(sql)
    def transaction(self, query, parameters):
        return self._db.transaction(query, parameters)
    def get(self, tablename, conds, cols='', extra_conds={}):
        if not tablename:
            return False
        cols = "%s" % ','.join(cols) if cols else '*'
        wheres = []
        values = []
        if conds and isinstance(conds, dict):
            for key, value in conds.items():
                if isinstance(value, (list, tuple)):
                    wheres.append("`%s` IN (%s)" % (key, "'%s'" % "','".join([str(v) for v in value])))
                else:
                    wheres.append("`%s`=%%s" % key)
                    values.append("%s" % value)
        where_str = ' AND '.join(wheres)
        sql = """ SELECT %s FROM `%s` """ % (cols, tablename)
        if where_str:
            sql += """ WHERE %s """ % (where_str)
        if extra_conds.get('group_by'):
            sql += """ GROUP by %s """ % ','.join(extra_conds['group_by'])
        if extra_conds.get('order_by'):
            sql += """ ORDER by %s """ % ','.join(extra_conds['order_by'])
        if extra_conds.get('limit'):
            sql += """ LIMIT %s """ % ','.join(map(str, extra_conds['limit']))
        return self._db.query(sql, *values)
    def _serialize(self, value):
        if isinstance(value, (dict, list, set)):
            value = json.dumps(value)
        else:
            value = "%s" % value
        return value
    def _formatter(self, pairs, delimiter):
        values = []
        for key, value in pairs.items():
            if not isinstance(value, list):
                value = self._serialize(value)
                values.append("""`%s`='%s'""" % (key, value))
            else:
                values.append("""`%s` in ("%s")""" % (key, '","'.join([self._serialize(val) for val in value])))
        return delimiter.join(values)
    def __del__(self):
        self._db.close()
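Example 14 wraps torndb.Connection in a small helper class. A hedged usage sketch follows; the host, credentials, and the users table are placeholders, and note that gen_insert and transaction assume a patched torndb build that exposes gen_insert_sql and transaction, which stock torndb does not provide.

# Placeholder connection details and table name.
helper = MySQLHelper("127.0.0.1", "root", "secret", "example_db")

# write() delegates to Connection.execute and returns the last insert id.
new_id = helper.write("INSERT INTO users (`name`, `age`) VALUES (%s, %s)", "alice", 30)

# insert_dict()/update_dict() build the SQL from dicts (no escaping, so trusted data only).
helper.insert_dict("users", {"name": "bob", "age": 25})
helper.update_dict("users", {"age": 26}, {"name": "bob"})

# get() supports simple equality/IN filters plus ordering and limits.
rows = helper.get("users", {"age": [25, 26, 30]}, cols=["name", "age"],
                  extra_conds={"order_by": ["age"], "limit": [0, 10]})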
Example 15: upload_meta_tables
# Required import: from torndb import Connection [as alias]
# Or: from torndb.Connection import execute [as alias]
def upload_meta_tables():
#Connect to the central configuration database
db = Connection('/tmp/mysql3306.sock',
config.DB_NAME,
config.DB_USER,
config.DB_PASSWD,
time_zone='+8:00')
db.execute('truncate table meta_tables') # truncate first, then insert
db.execute('truncate table meta_statistics') # truncate first, then insert
db.execute('truncate table meta_redundant_keys') # truncate first, then insert
# The E4S server with id 64 is not managed by the platform, so it is excluded manually with b.id !=64
v_sql = r"""SELECT b.ip,b.port,b.id as instance_id,a.id as schema_id,a.name as db_name from
resources_schema a,tag b where b.online_flag=1 and a.owner=b.id and b.id !=64 order by a.id,b.id"""
#print v_sql
upload_tables_list = db.query(v_sql)
if upload_tables_list: # loop over the instance tables
i=0
for upload_table in upload_tables_list:
instance_id = upload_table['instance_id']
schema_id = upload_table['schema_id']
db_name = upload_table['db_name']
host_ip = upload_table['ip']
mysql_port = upload_table['port']
v_host =host_ip + ':' + str(mysql_port)
#Connect to the remote instance
db_remote = Connection(v_host,
'information_schema',
config.DBA_QUERY_USER,
config.DBA_QUERY_PASSWD,
time_zone='+8:00')
# Fetch the table metadata, then insert it
v_pl = r"""SELECT table_catalog,table_schema,table_name,table_type,engine,version,row_format,
table_rows,avg_row_length,data_length,max_data_length,index_length,data_free,auto_increment,create_time,
update_time,check_time,table_collation,checksum,
create_options,table_comment from tables where table_type='BASE TABLE' and TABLE_SCHEMA='%s' """ % (db_name)
#print v_pl
table_list = db_remote.query(v_pl)
for table_row in table_list:
table_catalog = table_row['table_catalog']
table_schema = table_row['table_schema']
table_name = table_row['table_name']
table_type = table_row['table_type']
engine = table_row['engine']
version = table_row['version']
row_format = table_row['row_format']
table_rows = table_row['table_rows']
avg_row_length = table_row['avg_row_length']
data_length = table_row['data_length']
max_data_length = table_row['max_data_length']
index_length = table_row['index_length']
data_free = table_row['data_free']
auto_increment = table_row['auto_increment']
# Originally an int column, but to allow inserting NULL it is passed as a string via %s
if auto_increment is None:
auto_increment = 'NULL'
else:
auto_increment = "'"+str(auto_increment)+"'"
# Datetime column: to handle NULL values it is pre-processed and then inserted via %s
create_time = table_row['create_time']
#......... part of the code omitted here .........