This page collects typical usage examples of the Python DataDB class. If you are asking yourself what the DataDB class does, how it is used, or where to find working examples, the curated snippets below should help.
The sections below present 15 code examples of the DataDB class, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python samples.
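All snippets come from the PGObserver frontend and assume the usual module-level imports (psycopg2, psycopg2.extras, collections, json, os, time, cherrypy, and the frontend's own modules such as hosts, tplE, report, MonitorFrontend). The DataDB module itself is not shown on this page; judging from the calls the examples make (setConnectionString, getDataConnection, closeDataConnection, execute), it is a thin wrapper around psycopg2 connection handling. Below is a minimal sketch of what such a module might look like; the body is a reconstruction from those call sites, not PGObserver's actual source:

# Hypothetical sketch of the DataDB helper module, reconstructed from the
# call sites in the examples below -- not the actual PGObserver code.
import psycopg2
import psycopg2.extras

connection_string = None

def setConnectionString(conn_string):
    # store the libpq connection string built in main() (Examples 2 and 4)
    global connection_string
    connection_string = conn_string

def getDataConnection():
    # open a fresh connection; callers pair this with closeDataConnection()
    return psycopg2.connect(connection_string)

def closeDataConnection(conn):
    conn.close()

def execute(query, params=None):
    # convenience wrapper used in Example 6: run a query and
    # return all rows as dict-like records
    conn = getDataConnection()
    try:
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query, params)
        return cur.fetchall()
    finally:
        conn.close()

With a sketch like this in mind, the recurring pattern in the examples reads naturally: getDataConnection, a DictCursor, a query, then closeDataConnection.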
Example 1: getSprocDataByTags
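This example aggregates stored-procedure deltas (self time and call counts) over the last 9 days into 30-minute buckets and returns one time series per tag id.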
def getSprocDataByTags():
    sql = """select tm_tag_id, sum("yaxis") AS "yaxis_t", sum("yaxis2") AS "yaxis_c", "xaxis" from (
                select group_date(sp_timestamp,30) as "xaxis",
                       sp_sproc_id,
                       max(sp_self_time) - min(sp_self_time) as "yaxis",
                       max(sp_calls) - min(sp_calls) as "yaxis2"
                  from monitor_data.sproc_performance_data
                 where sp_timestamp > 'now'::timestamp - '9 days'::interval
                 group by sp_sproc_id, group_date(sp_timestamp,30) ) data,
               monitor_data.sprocs,
               monitor_data.tag_members
             where sprocs.sproc_id = sp_sproc_id
               and tm_sproc_name = sproc_name
               and tm_schema = get_noversion_name(sproc_schema)
             group by tm_tag_id, "xaxis" order by 4 asc;"""

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(sql)

    data = collections.defaultdict(list)
    for r in cur:
        data[r['tm_tag_id']].append((r['xaxis'], r['yaxis_t'], r['yaxis_c']))

    cur.close()
    DataDB.closeDataConnection(conn)
    return data
Example 2: main
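This example wires up the PGObserver frontend: it reads a JSON config file, hands the assembled connection string to DataDB, mounts one MonitorFrontend per enabled host under its UI short name, and starts the CherryPy server.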
def main():
    parser = ArgumentParser(description='PGObserver Frontend')
    parser.add_argument('-c', '--config', help='Path to config file. (default: %s)' % DEFAULT_CONF_FILE,
                        dest='config', default=DEFAULT_CONF_FILE)
    parser.add_argument('-p', '--port', help='server port', dest='port', type=int)
    args = parser.parse_args()

    args.config = os.path.expanduser(args.config)
    if not os.path.exists(args.config):
        print 'Configuration file missing:', args.config
        parser.print_help()
        return

    with open(args.config, 'rb') as fd:
        settings = json.load(fd)

    conn_string = ' '.join((
        'dbname=' + settings['database']['name'],
        'host=' + settings['database']['host'],
        'user=' + settings['database']['frontend_user'],
        'password=' + settings['database']['frontend_password'],
        'port=' + str(settings['database']['port']),
    ))

    print 'Setting connection string to ... ' + conn_string
    DataDB.setConnectionString(conn_string)

    if 'logfiles' in settings:
        logdata.setFilter(settings['logfiles']['liveuserfilter'])

    current_dir = os.path.dirname(os.path.abspath(__file__))

    conf = {'global': {'server.socket_host': '0.0.0.0',
                       'server.socket_port': args.port or settings.get('frontend', {}).get('port') or 8080},
            '/': {'tools.staticdir.root': current_dir},
            '/static': {'tools.staticdir.dir': 'static', 'tools.staticdir.on': True}}

    tplE.setup(settings)

    root = None
    for h in hosts.getHostData().values():
        mf = MonitorFrontend.MonitorFrontend(h['host_id'])
        if root is None:
            root = mf
        setattr(root, h['uishortname'], mf)

    root.report = report.Report()
    root.export = export.Export()
    root.perftables = performance.PerfTables()
    root.perfapi = performance.PerfApi()
    root.perfindexes = performance.PerfIndexes()
    root.sprocs = sprocsfrontend.SprocFrontend()
    root.tables = tablesfrontend.TableFrontend()

    cherrypy.quickstart(root, config=conf)
Example 3: getTableData
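This example turns cumulative table statistics into per-interval deltas: size columns are recorded as-is, while counters are only diffed when consecutive samples are at most 15 minutes apart.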
def getTableData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(getSingleTableSql(host, name, interval))

    d = {'table_size': [], 'index_size': [], 'seq_scans': [], 'index_scans': [],
         'ins': [], 'upd': [], 'del': [], 'hot': []}

    last_is = None
    last_ss = None
    last_ins = None
    last_del = None
    last_upd = None
    last_hot = None
    last_timestamp = 0

    for r in cur:
        d['table_size'].append((r['tsd_timestamp'], r['tsd_table_size']))
        d['index_size'].append((r['tsd_timestamp'], r['tsd_index_size']))

        # diff cumulative counters only when consecutive samples are
        # at most 15 minutes apart
        if int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_ss is not None:
                d['seq_scans'].append((r['tsd_timestamp'], r['tsd_seq_scans'] - last_ss))
            if last_is is not None:
                d['index_scans'].append((r['tsd_timestamp'], r['tsd_index_scans'] - last_is))
            if last_ins is not None and last_ins != 0:
                d['ins'].append((r['tsd_timestamp'], r['tsd_tup_ins'] - last_ins))
            if last_del is not None and last_del != 0:
                d['del'].append((r['tsd_timestamp'], r['tsd_tup_del'] - last_del))
            if last_upd is not None and last_upd != 0:
                d['upd'].append((r['tsd_timestamp'], r['tsd_tup_upd'] - last_upd))
            if last_hot is not None and last_hot != 0:
                d['hot'].append((r['tsd_timestamp'], r['tsd_tup_hot_upd'] - last_hot))

        last_is = r['tsd_index_scans']
        last_ss = r['tsd_seq_scans']
        last_ins = r['tsd_tup_ins']
        last_del = r['tsd_tup_del']
        last_upd = r['tsd_tup_upd']
        last_hot = r['tsd_tup_hot_upd']
        last_timestamp = int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)
    return d
Example 4: main
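Another variant of the frontend startup: same config handling as Example 2, but the port comes straight from the config file and each host's route name is derived inline from its uiShortName setting.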
def main():
    parser = ArgumentParser(description='PGObserver Frontend')
    parser.add_argument('-c', '--config', help='Path to config file. (default: %s)' % DEFAULT_CONF_FILE,
                        dest='config', default=DEFAULT_CONF_FILE)
    args = parser.parse_args()

    args.config = os.path.expanduser(args.config)
    if not os.path.exists(args.config):
        print 'Configuration file missing:', args.config
        parser.print_help()
        return

    with open(args.config, 'rb') as fd:
        settings = json.load(fd)

    conn_string = ' '.join(("dbname=" + settings['database']['name'],
                            "host=" + settings['database']['host'],
                            "user=" + settings['database']['frontend_user'],
                            "password=" + settings['database']['frontend_password'],
                            "port=" + str(settings['database']['port'])))

    print "Setting connection string to ... " + conn_string
    DataDB.setConnectionString(conn_string)

    if 'logfiles' in settings:
        logdata.setFilter(settings['logfiles']['liveuserfilter'])

    current_dir = os.path.dirname(os.path.abspath(__file__))

    conf = {'global': {'server.socket_host': '0.0.0.0',
                       'server.socket_port': int(settings['frontend']['port'])},
            '/': {'tools.staticdir.root': current_dir},
            '/static': {'tools.staticdir.dir': 'static',
                        'tools.staticdir.on': True}}

    tplE.setup(settings)

    root = None
    for h in hosts.getHostData().values():
        mf = MonitorFrontend.MonitorFrontend(h['host_id'])
        if root is None:
            root = mf
        setattr(root, h['settings']['uiShortName'].lower().replace('-', ''), mf)

    root.report = report.Report()
    root.sprocs = sprocsfrontend.SprocFrontend()
    root.tables = tablesfrontend.TableFrontend()

    cherrypy.quickstart(root, config=conf)
Example 5: getTop10Interval
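This example builds a top-10 list of stored procedures for a given host and interval, ordered by the caller-supplied sort expression, and formats the times for display.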
def getTop10Interval(order=avgRuntimeOrder, interval=None, hostId=1, limit=10):
    sql = """select regexp_replace("name", E'(\\\\(.*\\\\))','()') AS "name",
                    round( sum(d_calls), 0 ) AS "calls",
                    round( sum(d_total_time), 0 ) AS "totalTime",
                    round( sum(d_total_time) / sum(d_calls), 0 ) AS "avgTime"
               from ( """ + getSQL(interval, hostId) + """) tt
              where d_calls > 0
              group by "name"
              order by """ + order + """ limit """ + str(adapt(limit))

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(sql)

    sprocs = []
    for record in cur:
        record['avgTime'] = makeTimeIntervalReadable(record['avgTime'])
        record['totalTime'] = makeTimeIntervalReadable(record['totalTime'])
        sprocs.append(record)

    conn.close()
    return sprocs
Example 6: topsprocsbyruntime
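A handler method that returns, as JSON, the ten longest-running stored procedures per exported database over the last 7 days, computed with a window function and fetched through DataDB.execute.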
def topsprocsbyruntime(self):
    q = """
        select
          host_db_export_name as db,
          sproc_name,
          total_runtime
        from (
          select
            *,
            row_number() over(partition by host_db_export_name order by total_runtime desc)
          from (
            select
              host_db_export_name,
              substring(sproc_name, 1, position ('(' in sproc_name)-1) as sproc_name,
              max(sp_total_time)-min(sp_total_time) as total_runtime
            from sprocs
            join sproc_performance_data on sp_sproc_id = sproc_id
            join hosts on host_id = sproc_host_id
            where sp_timestamp > now() - '7days'::interval
              and host_db_export_name is not null
            group by 1, 2
          ) a
        ) b
        where row_number <= 10
        order by host_db_export_name, total_runtime desc
        """
    topbyruntime = DataDB.execute(q)
    retdict = defaultdict(list)
    for r in topbyruntime:
        retdict[r['db']].append(r['sproc_name'])
    return json.dumps(retdict)
Example 7: getLoadReportData
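This example computes weekly CPU-load averages and maxima per host over the last 9 weeks (weekdays only) and marks the week-over-week trend on each row.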
def getLoadReportData():
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("""select load_host_id AS id,
                          extract(week from load_timestamp)::text AS kw,
                          round(avg(load_1min_value)/100,2) AS avg,
                          round(max(load_1min_value)/100,2) AS max,
                          to_char(min(load_timestamp::date),'dd.mm.YYYY') AS min_date,
                          to_char(max(load_timestamp::date),'dd.mm.YYYY') AS max_date,
                          min(load_timestamp::date) AS sort_date
                     from monitor_data.host_load, monitor_data.hosts
                    where host_id = load_host_id
                      and host_enabled
                      and load_timestamp > ('now'::timestamp - '9 weeks'::interval)
                      and extract(dow from load_timestamp) IN (1,2,3,4,5)
                    group by load_host_id, extract(week from load_timestamp)
                    order by 1 ASC, 7 DESC""")

    data = defaultdict(list)
    lastRR = None

    for r in cur:
        rr = {'id': r['id'],
              'avg': r['avg'],
              'max': r['max'],
              'min_date': r['min_date'],
              'max_date': r['max_date'],
              'trendAvg': 0,
              'trendMax': 0,
              'kw': r['kw']}

        # rows arrive newest week first per host, so compare against the
        # previous (newer) row to set its trend markers
        if lastRR is not None and lastRR['id'] == rr['id']:
            if lastRR['max'] < r['max']:
                lastRR['trendMax'] = -1
            elif lastRR['max'] > r['max']:
                lastRR['trendMax'] = 1
            if lastRR['avg'] < r['avg']:
                lastRR['trendAvg'] = -1
            elif lastRR['avg'] > r['avg']:
                lastRR['trendAvg'] = 1

        data[int(r['id'])].append(rr)
        lastRR = rr

    cur.close()
    conn.close()

    return sorted(data.values(), key=lambda x: hosts.hosts[x[0]['id']]['settings']['uiShortName'])
Example 8: getSingleSprocData
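This example fetches the call and runtime series for a single stored procedure and groups them by metric.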
def getSingleSprocData(name, hostId=1, interval=None, sprocNr=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(getSingleSprocSQL(name, hostId, interval, sprocNr))

    data = {'calls': [], 'self_time': [], 'total_time': [],
            'avg_time': [], 'avg_self_time': [], 'name': name}

    for r in cur:
        data['calls'].append((r['xaxis'], r['d_calls']))
        data['total_time'].append((r['xaxis'], r['d_total_time']))
        data['self_time'].append((r['xaxis'], r['d_self_time']))
        data['avg_time'].append((r['xaxis'], r['d_avg_time']))
        data['avg_self_time'].append((r['xaxis'], r['d_avg_self_time']))

    cur.close()
    DataDB.closeDataConnection(conn)
    return data
Example 9: load_filter_lines
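This example loads (x, y) chart points for a filtered log query on a single host.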
def load_filter_lines(host_id, _filter=None, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(get_filted_query(host_id, _filter, interval))
    l = [(row['xaxis'], row['yaxis']) for row in cur]
    cur.close()
    DataDB.closeDataConnection(conn)  # release the connection; missing in the original snippet
    return l
Example 10: getSprocsOrderedBy
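This example returns the stored-procedure names for a host, ordered by the given aggregate (total runtime by default).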
def getSprocsOrderedBy(hostId, order=" ORDER BY SUM(delta_total_time) DESC"):
    sql = """SELECT sproc_name
               FROM ( """ + viewSprocs() + """ ) t
               JOIN monitor_data.sprocs ON sp_sproc_id = sproc_id
              WHERE sproc_host_id = """ + str(hostId) + """
              GROUP BY sproc_name
              """ + order + """;"""

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(sql)

    names = []  # renamed from `list`, which shadows the builtin
    for r in cur:
        names.append(r['sproc_name'])

    cur.close()
    DataDB.closeDataConnection(conn)
    return names
Example 11: getTableIOData
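The same delta pattern as Example 3, applied to table I/O counters (heap and index reads and hits).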
def getTableIOData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(getSingleTableIOSql(host, name, interval))

    d = {'heap_read': [], 'heap_hit': [], 'index_read': [], 'index_hit': []}

    last_hr = None
    last_hh = None
    last_ir = None
    last_ih = None
    last_timestamp = 0

    for r in cur:
        # diff cumulative counters only when consecutive samples are
        # at most 15 minutes apart
        if int(time.mktime(r['tio_timestamp'].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_hr is not None:
                d['heap_read'].append((r['tio_timestamp'], r['tio_heap_read'] - last_hr))
            if last_hh is not None:
                d['heap_hit'].append((r['tio_timestamp'], r['tio_heap_hit'] - last_hh))
            if last_ir is not None:
                d['index_read'].append((r['tio_timestamp'], r['tio_idx_read'] - last_ir))
            if last_ih is not None:
                d['index_hit'].append((r['tio_timestamp'], r['tio_idx_hit'] - last_ih))

        last_hr = r['tio_heap_read']
        last_hh = r['tio_heap_hit']
        last_ir = r['tio_idx_read']
        last_ih = r['tio_idx_hit']
        last_timestamp = int(time.mktime(r['tio_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)
    return d
Example 12: getGroupsData
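This example reads all host groups into an id-to-name dict.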
def getGroupsData():
    conn = DataDB.getDataConnection()
    groups = {}
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("SELECT * FROM monitor_data.host_groups;")
    for g in cur:
        groups[g['group_id']] = g['group_name']
    cur.close()
    conn.close()
    return groups
Example 13: getHostData
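This example loads all enabled hosts, keyed by host_id, and parses each host's JSON settings.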
def getHostData():
    conn = DataDB.getDataConnection()
    hosts = {}
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("SELECT * FROM monitor_data.hosts WHERE host_enabled = true ORDER BY host_id ASC;")
    for r in cur:
        rr = dict(r)
        rr['settings'] = json.loads(rr['host_settings'])
        hosts[rr['host_id']] = rr
    cur.close()
    conn.close()
    return hosts
Example 14: getHostData
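A variant of Example 13 that also precomputes the lowercase uishortname and uilongname fields used for routing in Example 2.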
def getHostData():
    conn = DataDB.getDataConnection()
    hosts = {}
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("SELECT * FROM monitor_data.hosts WHERE host_enabled = true ORDER BY host_id ASC;")
    for r in cur:
        rr = dict(r)
        rr['settings'] = json.loads(rr['host_settings'])
        rr['uishortname'] = rr['settings']['uiShortName'].lower().replace('-', '')
        rr['uilongname'] = rr['settings']['uiLongName']
        hosts[rr['host_id']] = rr
    cur.close()
    conn.close()
    return hosts
Example 15: getCpuLoad
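This example buckets 1-minute load samples into 15-minute windows over the last 9 days and returns average and maximum series, with values scaled down by 100.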
def getCpuLoad(hostId=1):
    load = {"load_15min_avg": [], "load_15min_max": []}

    sql = """SELECT date_trunc('hour'::text, load_timestamp)
                    + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval AS load_timestamp,
                    AVG(load_1min_value) AS load_15min_avg,
                    MAX(load_1min_value) AS load_15min_max
               FROM monitor_data.host_load
              WHERE load_host_id = """ + str(adapt(hostId)) + """
                AND load_timestamp > ('now'::timestamp - '9 days'::interval)
              GROUP BY date_trunc('hour'::text, load_timestamp)
                       + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval
              ORDER BY 1 ASC"""

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(sql)

    for record in cur:
        load['load_15min_avg'].append((record['load_timestamp'], round(float(record['load_15min_avg']) / 100, 2)))
        load['load_15min_max'].append((record['load_timestamp'], round(float(record['load_15min_max']) / 100, 2)))

    cur.close()
    DataDB.closeDataConnection(conn)  # release the connection; missing in the original snippet
    return load