This article collects typical usage examples of the Python method DataDB.closeDataConnection. If you are unsure what DataDB.closeDataConnection does, how to call it, or what it looks like in real code, the hand-picked method examples below may help. You can also explore further usage examples of the DataDB class that this method belongs to.
The following presents 7 code examples of the DataDB.closeDataConnection method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
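All of the examples below follow the same basic pattern: open a connection with DataDB.getDataConnection(), run a query through a psycopg2 DictCursor, close the cursor, and hand the connection back with DataDB.closeDataConnection(). The minimal sketch below distills that pattern; the fetch_rows helper and the try/finally wrapper are illustrative additions, not part of the original code.

import psycopg2.extras

import DataDB  # project module providing getDataConnection() / closeDataConnection()


def fetch_rows(sql, params=None):
    # Hypothetical helper showing the common acquire/query/close pattern.
    conn = DataDB.getDataConnection()
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(sql, params)
        rows = cur.fetchall()
        cur.close()
        return rows
    finally:
        # Return the connection even if the query raises.
        DataDB.closeDataConnection(conn)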
Example 1: getSprocDataByTags
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getSprocDataByTags():
    sql = """select tm_tag_id, sum("yaxis") AS "yaxis_t", sum("yaxis2") AS "yaxis_c", "xaxis" from (
                  select group_date(sp_timestamp,30) as "xaxis",
                         sp_sproc_id,
                         max(sp_self_time) - min(sp_self_time) as "yaxis",
                         max(sp_calls) - min(sp_calls) as "yaxis2"
                    from monitor_data.sproc_performance_data
                   where sp_timestamp > 'now'::timestamp - '9 days'::interval
                   group by sp_sproc_id, group_date(sp_timestamp,30) ) data,
                monitor_data.sprocs,
                monitor_data.tag_members
              where sprocs.sproc_id = sp_sproc_id
                and tm_sproc_name = sproc_name
                and tm_schema = get_noversion_name(sproc_schema)
              group by tm_tag_id, "xaxis" order by 4 asc;"""

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(sql)

    data = collections.defaultdict(list)
    for r in cur:
        data[r['tm_tag_id']].append((r['xaxis'], r['yaxis_t'], r['yaxis_c']))

    cur.close()
    DataDB.closeDataConnection(conn)

    return data
Example 2: getTableData
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getTableData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableSql(host, name, interval))

    d = { 'table_size': [], 'index_size': [], 'seq_scans': [], 'index_scans': [], 'ins': [], 'upd': [], 'del': [], 'hot': [] }

    last_is = None
    last_ss = None
    last_ins = None
    last_del = None
    last_upd = None
    last_hot = None
    last_timestamp = 0

    for r in cur:
        d['table_size'].append(( r['tsd_timestamp'], r['tsd_table_size'] ))
        d['index_size'].append(( r['tsd_timestamp'], r['tsd_index_size'] ))

        # Counters are cumulative; only emit deltas when the previous sample is at most 15 minutes old.
        if int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_ss is not None:
                d['seq_scans'].append(( r['tsd_timestamp'], r['tsd_seq_scans'] - last_ss ))
            if last_is is not None:
                d['index_scans'].append(( r['tsd_timestamp'], r['tsd_index_scans'] - last_is ))
            if last_ins is not None and last_ins != 0:
                d['ins'].append(( r['tsd_timestamp'], r['tsd_tup_ins'] - last_ins ))
            if last_del is not None and last_del != 0:
                d['del'].append(( r['tsd_timestamp'], r['tsd_tup_del'] - last_del ))
            if last_upd is not None and last_upd != 0:
                d['upd'].append(( r['tsd_timestamp'], r['tsd_tup_upd'] - last_upd ))
            if last_hot is not None and last_hot != 0:
                d['hot'].append(( r['tsd_timestamp'], r['tsd_tup_hot_upd'] - last_hot ))

        last_is = r['tsd_index_scans']
        last_ss = r['tsd_seq_scans']
        last_ins = r['tsd_tup_ins']
        last_del = r['tsd_tup_del']
        last_upd = r['tsd_tup_upd']
        last_hot = r['tsd_tup_hot_upd']
        last_timestamp = int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d
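As a quick illustration, the returned dict can be consumed as in the sketch below; the host id and table name are placeholders, not values from the original project.

# Hypothetical usage of the example above; 1 and 'orders' are placeholder arguments.
stats = getTableData(1, 'orders')
for ts, delta in stats['seq_scans']:
    print(ts, delta)  # sequential-scan delta for each ~15-minute sample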
Example 3: getSingleSprocData
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getSingleSprocData(name, hostId=1, interval=None, sprocNr=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleSprocSQL(name, hostId, interval, sprocNr))

    data = { 'calls': [], 'self_time': [], 'total_time': [], 'avg_time': [], 'avg_self_time': [], 'name': name }

    for r in cur:
        data['calls'].append(( r['xaxis'], r['d_calls'] ))
        data['total_time'].append(( r['xaxis'], r['d_total_time'] ))
        data['self_time'].append(( r['xaxis'], r['d_self_time'] ))
        data['avg_time'].append(( r['xaxis'], r['d_avg_time'] ))
        data['avg_self_time'].append(( r['xaxis'], r['d_avg_self_time'] ))

    cur.close()
    DataDB.closeDataConnection(conn)

    return data
Example 4: getSprocsOrderedBy
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getSprocsOrderedBy(hostId, order=" ORDER BY SUM(delta_total_time) DESC"):
    sql = """SELECT sproc_name
               FROM ( """ + viewSprocs() + """ ) t JOIN monitor_data.sprocs ON sp_sproc_id = sproc_id
              WHERE sproc_host_id = """ + str(hostId) + """
              GROUP BY sproc_name
              """ + order + """;
          """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    sproc_names = []

    cur.execute(sql)
    for r in cur:
        sproc_names.append(r['sproc_name'])

    cur.close()
    DataDB.closeDataConnection(conn)

    return sproc_names
Example 5: getTableIOData
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getTableIOData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableIOSql(host, name, interval))

    d = { 'heap_read': [], 'heap_hit': [], 'index_read': [], 'index_hit': [] }

    last_hr = None
    last_hh = None
    last_ir = None
    last_ih = None
    last_timestamp = 0

    for r in cur:
        if int(time.mktime(r['tio_timestamp'].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_hr is not None:
                d['heap_read'].append(( r['tio_timestamp'], r['tio_heap_read'] - last_hr ))
            if last_hh is not None:
                d['heap_hit'].append(( r['tio_timestamp'], r['tio_heap_hit'] - last_hh ))
            if last_ir is not None:
                d['index_read'].append(( r['tio_timestamp'], r['tio_idx_read'] - last_ir ))
            if last_ih is not None:
                d['index_hit'].append(( r['tio_timestamp'], r['tio_idx_hit'] - last_ih ))

        last_hr = r['tio_heap_read']
        last_hh = r['tio_heap_hit']
        last_ir = r['tio_idx_read']
        last_ih = r['tio_idx_hit']
        last_timestamp = int(time.mktime(r['tio_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d
Example 6: getDatabaseSizes
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getDatabaseSizes(host_id=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSizeTrendSQL(host_id))

    size_data = {}
    current_host = 0
    last_timestamp = None

    for record in cur:
        # Reset the per-host baseline counters whenever a new host appears in the result set.
        if record['t_host_id'] != current_host:
            current_host = record['t_host_id']
            set_ins = False
            set_del = False
            set_upd = False
            l_ins = None
            l_upd = None
            l_del = None
            last_timestamp = None

        if last_timestamp is None:
            last_timestamp = int(time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

        if not record['t_host_id'] in size_data:
            size_data[record['t_host_id']] = { 'size': [], 'ins': [], 'upd': [], 'del': [] }

        # Exclude 0 values, otherwise there is a big peak at the start; with wraparound this should be ok.
        if not set_ins and record['s_ins'] != 0:
            l_ins = record['s_ins']
            set_ins = True
        if not set_upd and record['s_upd'] != 0:
            l_upd = record['s_upd']
            set_upd = True
        if not set_del and record['s_del'] != 0:
            l_del = record['s_del']
            set_del = True

        if l_ins is None:
            l_ins = record['s_ins']
        if l_upd is None:
            l_upd = record['s_upd']
        if l_del is None:
            l_del = record['s_del']

        size_data[record['t_host_id']]['size'].append(( record['tsd_timestamp'], record['size'] ))
        size_data[record['t_host_id']]['ins'].append(( record['tsd_timestamp'], max(record['s_ins'] - l_ins, 0) ))
        size_data[record['t_host_id']]['del'].append(( record['tsd_timestamp'], max(record['s_del'] - l_del, 0) ))
        size_data[record['t_host_id']]['upd'].append(( record['tsd_timestamp'], max(record['s_upd'] - l_upd, 0) ))

        l_ins = record['s_ins']
        l_upd = record['s_upd']
        l_del = record['s_del']
        last_timestamp = int(time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return size_data
Example 7: getTableData
# Required module: import DataDB [as alias]
# Or: from DataDB import closeDataConnection [as alias]
def getTableData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableSql(host, name, interval))

    d = {
        "table_size": [],
        "index_size": [],
        "seq_scans": [],
        "index_scans": [],
        "ins": [],
        "upd": [],
        "del": [],
        "hot": [],
    }

    last_is = None
    last_ss = None
    last_ins = None
    last_del = None
    last_upd = None
    last_hot = None
    last_timestamp = 0

    for r in cur:
        d["table_size"].append((r["tsd_timestamp"], r["tsd_table_size"]))
        d["index_size"].append((r["tsd_timestamp"], r["tsd_index_size"]))

        if int(time.mktime(r["tsd_timestamp"].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_ss is not None:
                d["seq_scans"].append((r["tsd_timestamp"], r["tsd_seq_scans"] - last_ss))
            if last_is is not None:
                d["index_scans"].append((r["tsd_timestamp"], r["tsd_index_scans"] - last_is))
            if last_ins is not None and last_ins != 0:
                d["ins"].append((r["tsd_timestamp"], r["tsd_tup_ins"] - last_ins))
            if last_del is not None and last_del != 0:
                d["del"].append((r["tsd_timestamp"], r["tsd_tup_del"] - last_del))
            if last_upd is not None and last_upd != 0:
                d["upd"].append((r["tsd_timestamp"], r["tsd_tup_upd"] - last_upd))
            if last_hot is not None and last_hot != 0:
                d["hot"].append((r["tsd_timestamp"], r["tsd_tup_hot_upd"] - last_hot))

        last_is = r["tsd_index_scans"]
        last_ss = r["tsd_seq_scans"]
        last_ins = r["tsd_tup_ins"]
        last_del = r["tsd_tup_del"]
        last_upd = r["tsd_tup_upd"]
        last_hot = r["tsd_tup_hot_upd"]
        last_timestamp = int(time.mktime(r["tsd_timestamp"].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d