本文整理汇总了Python中opserver_util.OpServerUtils类的典型用法代码示例。如果您正苦于以下问题:Python OpServerUtils类的具体用法?Python OpServerUtils怎么用?Python OpServerUtils使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了OpServerUtils类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: main
def main():
querier = StatQuerier()
if querier.parse_args() != 0:
return
if len(querier._args.select)==0 and querier._args.dtable is None:
tab_url = "http://" + querier._args.analytics_api_ip + ":" +\
querier._args.analytics_api_port +\
"/analytics/table/StatTable." + querier._args.table
schematxt = OpServerUtils.get_url_http(tab_url + "/schema")
schema = json.loads(schematxt.text)['columns']
for pp in schema:
if pp.has_key('suffixes') and pp['suffixes']:
des = "%s %s" % (pp['name'],str(pp['suffixes']))
else:
des = "%s" % pp['name']
if pp['index']:
valuetxt = OpServerUtils.get_url_http(tab_url + "/column-values/" + pp['name'])
print "%s : %s %s" % (des,pp['datatype'], valuetxt.text)
else:
print "%s : %s" % (des,pp['datatype'])
else:
result = querier.query()
querier.display(result)
示例2: run
def run(self):
if self.parse_args() != 0:
return
if len(self._args.select)==0 and self._args.dtable is None:
tab_url = "http://" + self._args.analytics_api_ip + ":" +\
self._args.analytics_api_port +\
"/analytics/table/StatTable." + self._args.table
schematxt = OpServerUtils.get_url_http(tab_url + "/schema",
self._args.admin_user, self._args.admin_password)
schema = json.loads(schematxt.text)['columns']
for pp in schema:
if pp.has_key('suffixes') and pp['suffixes']:
des = "%s %s" % (pp['name'],str(pp['suffixes']))
else:
des = "%s" % pp['name']
if pp['index']:
valuetxt = OpServerUtils.get_url_http(
tab_url + "/column-values/" + pp['name'],
self._args.admin_user, self._args.admin_password)
print "%s : %s %s" % (des,pp['datatype'], valuetxt.text)
else:
print "%s : %s" % (des,pp['datatype'])
else:
result = self.query()
self.display(result)
示例3: query
def query(self):
query_url = OpServerUtils.opserver_query_url(
self._args.analytics_api_ip,
self._args.analytics_api_port)
if self._args.dtable is not None:
rtable = self._args.dtable
else:
rtable = self._args.table
query_dict = OpServerUtils.get_query_dict(
"StatTable." + rtable, str(self._start_time), str(self._end_time),
select_fields = self._args.select,
where_clause = "AND".join(self._args.where),
sort_fields = self._args.sort)
print json.dumps(query_dict)
resp = OpServerUtils.post_url_http(
query_url, json.dumps(query_dict), sync = True)
res = None
if resp is not None:
res = json.loads(resp)
res = res['value']
return res
示例4: post_query
def post_query(self, table, start_time=None, end_time=None,
select_fields=None,
where_clause='',
sort_fields=None, sort=None, limit=None, filter=None, dir=None):
res = None
try:
flows_url = OpServerUtils.opserver_query_url(
self._ip, str(self._port))
print flows_url
query_dict = OpServerUtils.get_query_dict(
table, start_time, end_time,
select_fields,
where_clause,
sort_fields, sort, limit, filter, dir)
print json.dumps(query_dict)
res = []
resp = OpServerUtils.post_url_http(
flows_url, json.dumps(query_dict))
if resp is not None:
resp = json.loads(resp)
qid = resp['href'].rsplit('/', 1)[1]
result = OpServerUtils.get_query_result(
self._ip, str(self._port), qid)
for item in result:
res.append(item)
except Exception as e:
print str(e)
finally:
return res
示例5: get_dbusage_info
def get_dbusage_info(self, ip, port, user, password):
    """Collect analytics database disk usage from all db nodes.

    Returns a dictionary mapping db node name -> usage percentage;
    nodes that could not be read are absent (errors are logged).
    """
    to_return = {}
    try:
        uve_url = "http://" + ip + ":" + str(port) + \
            "/analytics/uves/database-nodes?cfilt=DatabaseUsageInfo"
        data = OpServerUtils.get_url_http(uve_url, user, password)
        for node_dburl in json.loads(data.text):
            # Per-node UVE carries the cassandra disk counters (KiB).
            db_uve_data = OpServerUtils.get_url_http(
                node_dburl['href'], user, password)
            db_uve_state = json.loads(db_uve_data.text)
            usage = db_uve_state['DatabaseUsageInfo']['database_usage'][0]
            total_1k = float(usage['disk_space_available_1k'] +
                             usage['disk_space_used_1k'])
            to_return[node_dburl['name']] = \
                100 * float(usage['analytics_db_size_1k']) / total_1k
    except Exception as inst:
        self._logger.error(type(inst))  # the exception instance
        self._logger.error(inst.args)   # arguments stored in .args
        self._logger.error(inst)        # __str__ of the exception
        self._logger.error("Could not retrieve db usage information")
    self._logger.info("db usage:" + str(to_return))
    return to_return
示例6: get_pending_compaction_tasks
def get_pending_compaction_tasks(self):
    """Collect pending compaction task counts from all db nodes.

    Returns a dictionary mapping db node name -> pending compaction
    tasks; nodes that could not be read are absent (the error is
    logged).
    """
    to_return = {}
    try:
        uve_url = ("http://" + self._ip + ":" + str(self._admin_port) +
                   "/analytics/uves/database-nodes?cfilt="
                   "CassandraStatusData:cassandra_compaction_task")
        data = OpServerUtils.get_url_http(uve_url, self._admin_user,
                                          self._admin_password)
        for node_dburl in json.loads(data.text):
            # Drill into each node's UVE for its compaction counter.
            db_uve_data = OpServerUtils.get_url_http(
                node_dburl['href'],
                self._admin_user, self._admin_password)
            status = json.loads(db_uve_data.text)['CassandraStatusData']
            to_return[node_dburl['name']] = int(
                status['cassandra_compaction_task']
                ['pending_compaction_tasks'])
    except Exception as inst:
        self._logger.error("Exception: Could not retrieve pending"
                           " compaction tasks information %s" %
                           str(type(inst)))
    self._logger.info("pending compaction tasks :" + str(to_return))
    return to_return
示例7: run
def run(self):
    """CLI entry for the object-log querier.

    Pre-scans argv for endpoint/credential overrides, fetches the
    table list to populate OBJECT_TYPE_LIST, then runs the query
    either in tail (follow) mode or as a one-shot over the requested
    time window.
    """
    try:
        index = 0
        analytics_api_ip = self._defaults['analytics_api_ip']
        analytics_api_port = self._defaults['analytics_api_port']
        username = self._defaults['username']
        password = self._defaults['password']
        # Manual argv pre-scan: the endpoint and credentials are needed
        # to fetch the table list *before* argparse runs, because that
        # list feeds OBJECT_TYPE_LIST used during parse_args().
        for arg in sys.argv:
            index = index + 1
            if arg == "--analytics-api-ip":
                analytics_api_ip = sys.argv[index]
            elif arg == "--analytics-api-port":
                analytics_api_port = sys.argv[index]
            elif arg == "--admin-user":
                username = sys.argv[index]
            elif arg == "--admin-password":
                password = sys.argv[index]
        tab_url = "http://" + analytics_api_ip + ":" +\
            analytics_api_port + "/analytics/tables"
        tables = OpServerUtils.get_url_http(tab_url,
                                            username, password)
        # NOTE(review): get_url_http appears to return a response
        # object elsewhere in this file, so `tables != {}` looks
        # always-true — confirm the intended failure check.
        if tables != {}:
            table_list = json.loads(tables.text)
            for table in table_list:
                if table['type'] == 'OBJECT':
                    OBJECT_TYPE_LIST.append(str(table['display_name']))
        if self.parse_args() != 0:
            return
        if self._args.tail:
            # Tail mode: poll every 3 seconds; each iteration queries
            # the window since the previous iteration's end time.
            start_time = UTCTimestampUsec() - 10*pow(10,6)
            while True:
                self._start_time = start_time
                self._end_time = UTCTimestampUsec()
                start_time = self._end_time + 1
                time.sleep(3)
                result = self.query()
                if result == -1:
                    return
                self.display(result)
        else:
            # One-shot mode: default to the last 10 minutes unless the
            # caller supplied explicit start/end times (or --last).
            start_time = self._args.start_time
            end_time = self._args.end_time
            if not self._args.start_time:
                start_time = "now-10m"
            if not self._args.end_time:
                end_time = "now"
            try:
                self._start_time, self._end_time = \
                    OpServerUtils.parse_start_end_time(
                        start_time = start_time,
                        end_time = end_time,
                        last = self._args.last)
            except:
                return -1
            result = self.query()
            if result == -1:
                return
            self.display(result)
    except KeyboardInterrupt:
        return
示例8: __init__
def __init__(self, query_json, analytics_api_ip,
             analytics_api_port, user, password, logger):
    """Store the analytics-api connection details and, when a query is
    supplied, normalize its start/end times to absolute timestamps.

    :param query_json: query dict carrying at least 'start_time' and
        'end_time' (absolute usec values or relative strings such as
        'now-10m'); may be None.
    :param analytics_api_ip: analytics-api server IP.
    :param analytics_api_port: analytics-api server port.
    :param user: API username.
    :param password: API password.
    :param logger: logger instance used by this object.
    """
    self.query_json = query_json
    self._analytics_api_ip = analytics_api_ip
    self._analytics_api_port = analytics_api_port
    self._user = user
    self._password = password
    self._logger = logger
    if self.query_json is not None:
        self._start_time = self.query_json['start_time']
        self._end_time = self.query_json['end_time']
        # If the start_time/end_time in the query is specified as
        # relative time, then the actual start_time/end_time for the
        # FlowRecordTable query and UFlowData query would be different.
        # Since the FlowRecordTable is queried first and the result of
        # which is used to query the UFlowData table, the result may
        # not be correct if the start_time/end_time is different for
        # FlowRecord and UFlowData queries. Therefore, convert the
        # relative start/end time to absolute time.
        if not str(self._start_time).isdigit():
            self._start_time = \
                OpServerUtils.convert_to_utc_timestamp_usec(self._start_time)
        if not str(self._end_time).isdigit():
            self._end_time = \
                OpServerUtils.convert_to_utc_timestamp_usec(self._end_time)
示例9: aggregate
def aggregate(self, key, flat):
    '''
    This function does parallel aggregation of this UVE's state.
    It aggregates across all sources and returns the global state of
    the UVE.
    '''
    # self._state[key] maps type -> attribute -> per-source values.
    # Each attribute is reduced by one of four strategies — sum,
    # union, append, or the default per-source list — chosen by the
    # _is_* predicates; with flat=True the reduced value is further
    # flattened for display via uve_attr_flatten().
    result = {}
    try:
        for typ in self._state[key].keys():
            result[typ] = {}
            for objattr in self._state[key][typ].keys():
                if self._is_sum(self._state[key][typ][objattr]):
                    sum_res = self._sum_agg(self._state[key][typ][objattr])
                    if flat:
                        result[typ][objattr] = \
                            OpServerUtils.uve_attr_flatten(sum_res)
                    else:
                        result[typ][objattr] = sum_res
                elif self._is_union(self._state[key][typ][objattr]):
                    union_res = self._union_agg(
                        self._state[key][typ][objattr])
                    if flat:
                        result[typ][objattr] = \
                            OpServerUtils.uve_attr_flatten(union_res)
                    else:
                        result[typ][objattr] = union_res
                elif self._is_append(self._state[key][typ][objattr]):
                    # Append aggregation: merge the lists, then
                    # consolidate duplicates in place on `result`
                    # before optionally flattening.
                    result[typ][objattr] = self._append_agg(
                        self._state[key][typ][objattr])
                    append_res = ParallelAggregator.consolidate_list(
                        result, typ, objattr)
                    if flat:
                        result[typ][objattr] =\
                            OpServerUtils.uve_attr_flatten(append_res)
                    else:
                        result[typ][objattr] = append_res
                else:
                    # Default aggregation keeps one (value, source)
                    # entry per source; a single entry is unwrapped.
                    default_res = self._default_agg(
                        self._state[key][typ][objattr])
                    if flat:
                        if (len(default_res) == 1):
                            result[typ][objattr] =\
                                OpServerUtils.uve_attr_flatten(
                                    default_res[0][0])
                        else:
                            # NOTE(review): nres holds references into
                            # default_res, so the [0] assignment also
                            # mutates default_res — confirm intended.
                            nres = []
                            for idx in range(len(default_res)):
                                nres.append(default_res[idx])
                                nres[idx][0] =\
                                    OpServerUtils.uve_attr_flatten(
                                        default_res[idx][0])
                            result[typ][objattr] = nres
                    else:
                        result[typ][objattr] = default_res
    except KeyError:
        # Unknown key: return whatever was aggregated so far.
        pass
    return result
示例10: run
def run(self):
topdir = "/usr/share/doc/contrail-docs/html/messages/"
extn = ".json"
stat_schema_files = []
for dirpath, dirnames, files in os.walk(topdir):
for name in files:
if name.lower().endswith(extn):
stat_schema_files.append(os.path.join(dirpath, name))
stat_tables = []
for schema_file in stat_schema_files:
with open(schema_file) as data_file:
data = json.load(data_file)
for _, tables in data.iteritems():
for table in tables:
if table not in stat_tables:
stat_tables.append(table)
stat_table_list = [xx.stat_type + "." + xx.stat_attr for xx in VizConstants._STAT_TABLES]
stat_table_list.extend([xx["stat_type"] + "." + xx["stat_attr"] for xx in stat_tables])
if self.parse_args(stat_table_list) != 0:
return
if len(self._args.select) == 0 and self._args.dtable is None:
tab_url = (
"http://"
+ self._args.analytics_api_ip
+ ":"
+ self._args.analytics_api_port
+ "/analytics/table/StatTable."
+ self._args.table
)
schematxt = OpServerUtils.get_url_http(
tab_url + "/schema", self._args.admin_user, self._args.admin_password
)
schema = json.loads(schematxt.text)["columns"]
for pp in schema:
if pp.has_key("suffixes") and pp["suffixes"]:
des = "%s %s" % (pp["name"], str(pp["suffixes"]))
else:
des = "%s" % pp["name"]
if pp["index"]:
valuetxt = OpServerUtils.get_url_http(
tab_url + "/column-values/" + pp["name"], self._args.admin_user, self._args.admin_password
)
print "%s : %s %s" % (des, pp["datatype"], valuetxt.text)
else:
print "%s : %s" % (des, pp["datatype"])
else:
result = self.query()
self.display(result)
示例11: _send_query
def _send_query(self, query):
    """Post the query to the analytics-api server and return the
    'value' field of the JSON response.

    Raises _QueryError when the response is missing or unparsable.
    """
    self._logger.debug('Sending query: %s' % (query))
    opserver_url = OpServerUtils.opserver_query_url(
        self._analytics_api_ip, str(self._analytics_api_port))
    raw = OpServerUtils.post_url_http(opserver_url, query, True)
    try:
        value = json.loads(raw)['value']
    except (TypeError, ValueError, KeyError):
        # No response, bad JSON, or missing 'value' key.
        raise _QueryError(query)
    self._logger.debug('Query response: %s' % str(value))
    return value
示例12: main
def main():
    """CLI entry for the log querier: parse args, then either tail the
    logs (poll every 3 seconds) or run a one-shot query over the
    requested time window and display the result.
    """
    try:
        querier = LogQuerier()
        if querier.parse_args() != 0:
            return
        if querier._args.f:
            # Follow mode: start 10 seconds in the past, then poll,
            # each iteration covering the window since the last.
            start_time = UTCTimestampUsec() - 10 * pow(10, 6)
            while True:
                querier._start_time = start_time
                querier._end_time = UTCTimestampUsec()
                start_time = querier._end_time + 1
                time.sleep(3)
                result = querier.query()
                if result == -1:
                    return
                querier.display(result)
        else:
            # One-shot mode: default to the last 10 minutes unless
            # explicit start/end times (or --last) were supplied.
            start_time = querier._args.start_time
            end_time = querier._args.end_time
            if not querier._args.start_time:
                start_time = "now-10m"
            if not querier._args.end_time:
                end_time = "now"
            try:
                querier._start_time, querier._end_time = \
                    OpServerUtils.parse_start_end_time(
                        start_time=start_time, end_time=end_time,
                        last=querier._args.last)
            # Fix: the original bare 'except:' also swallowed
            # KeyboardInterrupt/SystemExit; catch only real errors so
            # Ctrl-C reaches the outer handler.
            except Exception:
                return -1
            result = querier.query()
            if result == -1:
                return
            querier.display(result)
    except KeyboardInterrupt:
        return
示例13: post_query
def post_query(self, table, start_time=None, end_time=None,
               select_fields=None,
               where_clause='',
               sort_fields=None, sort=None, limit=None, filter=None,
               dir=None, session_type=None):
    """POST a query via the REST client and collect the result rows.

    Returns a list of rows, an empty list for a failed/empty query,
    or None when building the query dict itself failed (errors are
    logged at debug level, never raised).
    """
    res = None
    try:
        query_dict = OpServerUtils.get_query_dict(
            table, start_time, end_time,
            select_fields,
            where_clause,
            sort_fields, sort, limit, filter, dir,
            session_type)
        res = []
        resp = self.post(path='analytics/query', payload=query_dict)
        if resp is not None:
            try:
                # Asynchronous response: fetch rows by query id.
                qid = resp['href'].rsplit('/', 1)[1]
                for row in self.get_query_result(qid):
                    res.append(row)
            except Exception:
                # Synchronous response: rows are carried inline.
                if 'value' in resp:
                    res.extend(resp['value'])
    except Exception as e:
        self.log.debug("Got exception %s" % e)
    finally:
        return res
示例14: query
def query(self, table, start_time=None, end_time=None,
select_fields=None,
where_clause="",
sort_fields=None, sort=None, limit=None, filter=None):
"""
This function takes in the query parameters,
format appropriately and calls
ReST API to the :mod:`opserver` to get data
:param table: table to do the query on
:type table: str
:param start_time: start_time of the query's timeperiod
:type start_time: int
:param end_time: end_time of the query's timeperiod
:type end_time: int
:param select_fields: list of columns to be returned in the
final result
:type select_fields: list of str
:param where_clause: list of match conditions for the query
:type where_clause: list of match, which is a pair of str ANDed
:returns: str -- json formatted result
:raises: Error
"""
flows_url = OpServerUtils.opserver_query_url(self._args.opserver_ip,
self._args.opserver_port)
print flows_url
query_dict = OpServerUtils.get_query_dict(table, start_time, end_time,
select_fields, where_clause,
sort_fields, sort, limit,
filter)
print json.dumps(query_dict)
resp = OpServerUtils.post_url_http(flows_url, json.dumps(query_dict))
if resp is not None:
resp = json.loads(resp)
qid = resp['href'].rsplit('/', 1)[1]
result = OpServerUtils.get_query_result(self._args.opserver_ip,
self._args.opserver_port,
qid)
for item in result:
print item
return
示例15: parse_args
def parse_args(self):
    """Parse the stats CLI arguments into self._args and resolve the
    query window into self._start_time / self._end_time.

    Returns 0 on success; -1 when neither --table nor --dtable was
    given, or the time arguments are invalid.

    Eg. python stats.py --analytics-api-ip 127.0.0.1
        --analytics-api-port 8081
        --table AnalyticsCpuState.cpu_info
        --where name=a6s40 cpu_info.module_id=Collector
        --select "T=60 SUM(cpu_info.cpu_share)"
        --sort "SUM(cpu_info.cpu_share)"
        [--start-time now-10m --end-time now] | --last 10m
    python stats.py --table AnalyticsCpuState.cpu_info
    """
    defaults = {
        'analytics_api_ip': '127.0.0.1',
        'analytics_api_port': '8081',
        'start_time': 'now-10m',
        'end_time': 'now',
        'select': [],
        'where': ['Source=*'],
        'sort': []
    }
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.set_defaults(**defaults)
    parser.add_argument("--analytics-api-ip",
                        help="IP address of Analytics API Server")
    # Fix: help-text typo ("Analytcis" -> "Analytics").
    parser.add_argument("--analytics-api-port",
                        help="Port of Analytics API Server")
    parser.add_argument(
        "--start-time", help="Logs start time (format now-10m, now-1h)")
    parser.add_argument("--end-time", help="Logs end time")
    parser.add_argument(
        "--last", help="Logs from last time period (format 10m, 1d)")
    parser.add_argument(
        "--table", help="StatTable to query", choices=STAT_TABLE_LIST)
    parser.add_argument(
        "--dtable", help="Dynamic StatTable to query")
    parser.add_argument(
        "--select", help="List of Select Terms", nargs='+')
    parser.add_argument(
        "--where", help="List of Where Terms to be ANDed", nargs='+')
    parser.add_argument(
        "--sort", help="List of Sort Terms", nargs='+')
    self._args = parser.parse_args()
    # Either a static or a dynamic table must be named.
    if self._args.table is None and self._args.dtable is None:
        return -1
    try:
        self._start_time, self._end_time = \
            OpServerUtils.parse_start_end_time(
                start_time=self._args.start_time,
                end_time=self._args.end_time,
                last=self._args.last)
    # Fix: narrowed from a bare 'except:' so KeyboardInterrupt and
    # SystemExit are not silently converted into a -1 return.
    except Exception:
        return -1
    return 0