本文整理汇总了Python中askomics.libaskomics.rdfdb.QueryLauncher.QueryLauncher.format_results_csv方法的典型用法代码示例。如果您正苦于以下问题:Python QueryLauncher.format_results_csv方法的具体用法?Python QueryLauncher.format_results_csv怎么用?Python QueryLauncher.format_results_csv使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类askomics.libaskomics.rdfdb.QueryLauncher.QueryLauncher
的用法示例。
在下文中一共展示了QueryLauncher.format_results_csv方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_value
# 需要导入模块: from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher [as 别名]
# 或者: from askomics.libaskomics.rdfdb.QueryLauncher.QueryLauncher import format_results_csv [as 别名]
def get_value(self):
    """Build and run a SPARQL query for attribute values from the request's JSON body.

    The JSON body is expected to contain the keys ``variates``,
    ``constraintesRelations``, ``limit`` and ``export``.

    Returns a dict with:
      - ``values``: the raw query results;
      - ``file``: a CSV rendering of the results, only when ``export`` is truthy;
      - on failure, ``values`` and ``file`` are empty strings and ``error``
        carries the formatted traceback plus the exception message.
    """
    self.log.debug("== Attribute Value ==")
    data = {}
    tse = TripleStoreExplorer(self.settings, self.request.session)
    body = self.request.json_body
    try:
        # build_sparql_query_from_json also returns the query text; it is unused here.
        results, _query = tse.build_sparql_query_from_json(
            body["variates"], body["constraintesRelations"], body["limit"], True)
        data['values'] = results
        if not body['export']:
            return data
        # Provide the results as a downloadable CSV file.
        ql = QueryLauncher(self.settings, self.request.session)
        rb = ResultsBuilder(self.settings, self.request.session)
        data['file'] = ql.format_results_csv(rb.build_csv_table(results))
    except Exception as e:
        # Request-handler boundary: report the error to the client instead of raising.
        traceback.print_exc(file=sys.stdout)
        data['values'] = ""
        data['file'] = ""
        data['error'] = traceback.format_exc(limit=8)+"\n\n\n"+str(e)
        self.log.error(str(e))
    return data
示例2: launch_query
# 需要导入模块: from askomics.libaskomics.rdfdb.QueryLauncher import QueryLauncher [as 别名]
# 或者: from askomics.libaskomics.rdfdb.QueryLauncher.QueryLauncher import format_results_csv [as 别名]
def launch_query(self):
    """Convert the constraints table built by the query graph into a SPARQL
    query, send it to the triplestore and compile the results.

    The JSON body is expected to contain ``export``, ``return_only_query``,
    ``uploaded`` and ``constraint``.

    Returns a dict with:
      - ``query`` only, when ``return_only_query`` is set;
      - ``file`` (CSV of the results), when ``export`` is set;
      - otherwise ``results_entity_name``, ``results_entity_attributes`` and
        ``results`` (rows with the askomics URI prefix stripped from values).
    """
    data = {}
    body = self.request.json_body
    export = bool(int(body['export']))
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    return_only_query = bool(int(body['return_only_query']))
    if body['uploaded'] != '':
        query = body['uploaded']
        if export:
            # Raise the preview limit for exports so the CSV covers (nearly) all rows.
            query = query.replace('LIMIT 30', 'LIMIT 10000')
    else:
        query = sqb.load_from_query_json(body).query
    if return_only_query:
        data['query'] = query
        return data
    ql = QueryLauncher(self.settings, self.request.session)
    rb = ResultsBuilder(self.settings, self.request.session)
    results = ql.process_query(query)
    if export:
        data['file'] = ql.format_results_csv(rb.build_csv_table(results))
    else:
        entity_name_list, entity_list_attributes = rb.organize_attribute_and_entity(results, body['constraint'])
        data['results_entity_name'] = entity_name_list
        data['results_entity_attributes'] = entity_list_attributes
        # Hoist the prefix lookup out of the comprehension; strip it from every
        # value so the client displays short names.
        prefix = self.settings["askomics.prefix"]
        data['results'] = [
            {key: value.replace(prefix, '') for key, value in res.items()}
            for res in results
        ]
    self.log.debug("== results ==")
    for elt in results:
        self.log.debug(elt)
    return data