本文整理汇总了Python中askomics.libaskomics.rdfdb.SparqlQueryBuilder.SparqlQueryBuilder类的典型用法代码示例。如果您正苦于以下问题:Python SparqlQueryBuilder类的具体用法?Python SparqlQueryBuilder怎么用?Python SparqlQueryBuilder使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了SparqlQueryBuilder类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: positionable_attr
def positionable_attr(self):
    """
    Return the positionable attributes in common between two positionable entities.

    Reads ``node`` and ``second_node`` from the request JSON body and returns a
    dict mapping each shared positionable attribute name to a boolean (True when
    the attribute's status is truthy for every matching result row). On failure
    returns ``{'error': ...}``.
    """
    # FIXME: Rewrite this ugly method
    body = self.request.json_body
    data = {}
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    # Check that BOTH entities are positionable.
    positionable1 = ql.process_query(sqb.get_if_positionable(body['node']).query)
    # Bug fix: this previously queried body['node'] a second time, so
    # 'second_node' was never actually validated.
    positionable2 = ql.process_query(sqb.get_if_positionable(body['second_node']).query)
    if positionable1 == 0 or positionable2 == 0:
        data['error'] = 'not positionable nodes'
        return data

    results = ql.process_query(
        sqb.get_common_positionable_attributes(body['node'], body['second_node']).query)
    self.log.debug(results)

    prefix = "http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#"
    data['results'] = {}
    list_pos_attr = []
    for elem in results:
        # Bug fix: compare the STRIPPED name against the list of stripped
        # names (the old code compared the full URI, so the membership test
        # never matched and duplicates were appended).
        pos_attr = elem['pos_attr'].replace(prefix, "")
        if pos_attr not in list_pos_attr:
            list_pos_attr.append(pos_attr)

    for elem in list_pos_attr:
        # An attribute is "common" only if every matching row reports a truthy status.
        data['results'][elem] = False not in [
            bool(int(p['status'])) for p in results
            if p['pos_attr'] == prefix + elem
        ]
    return data
示例2: empty
def empty(self):
    """Empty the triplestore by deleting the content of every named graph."""
    query_builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    for named_graph in self.list_named_graphs():
        launcher.execute_query(query_builder.get_delete_query_string(named_graph).query)
示例3: insert_metadatas
def insert_metadatas(self, accessL):
    """
    Insert the metadatas into the parent graph.

    :param accessL: True when the graph is public, False when it is private
    :raises ValueError: if the ``askomics.endpoint`` setting is not defined
    """
    self.log.debug('--- insert_metadatas ---')
    sqb = SparqlQueryBuilder(self.settings, self.session)
    query_launcher = QueryLauncher(self.settings, self.session)
    access_level = 'public' if accessL else 'private'
    graph = '<' + self.graph + '>'

    ttl = graph + ' prov:generatedAtTime "' + self.timestamp + '"^^xsd:dateTime .\n'
    ttl += graph + ' dc:creator "' + self.session['username'] + '" .\n'
    ttl += graph + ' :accessLevel "' + access_level + '" .\n'
    ttl += graph + ' foaf:Group "' + self.session['group'] + '" .\n'
    ttl += graph + ' prov:wasDerivedFrom "' + self.name + '" .\n'
    ttl += graph + ' dc:hasVersion "' + get_distribution('Askomics').version + '" .\n'
    ttl += graph + ' prov:describesService "' + os.uname()[1] + '" .\n'

    if self.is_defined("askomics.endpoint"):
        ttl += graph + ' prov:atLocation "' + self.get_param("askomics.endpoint") + '" .\n'
    else:
        # Message typo fixed: previously read "does not exit."
        raise ValueError("askomics.endpoint does not exist.")

    sparql_header = sqb.header_sparql_config('')
    query_launcher.insert_data(ttl, self.graph, sparql_header)
示例4: compare_file_to_database
def compare_file_to_database(self, headers):
    """Ask the database to compare the headers of a file to convert to the corresponding class in the database.

    :param headers: column headers of the file; headers[0] is the entity name
    :return: tuple (missing_headers, new_headers, present_headers)
    """
    curr_entity = headers[0]
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)

    template = self.get_template_sparql(self.ASKOMICS_get_class_info_from_abstraction_queryFile)
    results = launcher.process_query(
        builder.load_from_file(template, {"#nodeClass#": curr_entity}).query)
    if results == []:
        # Entity unknown in the abstraction: every header counts as "new".
        return [], headers, []

    bdd_relations = []
    missing_headers = []
    for result in results:
        relation = result["relation"].replace(self.get_param("askomics.prefix"), "").replace("has_", "")
        bdd_relations.append(relation)
        if relation not in headers:
            self.log.warning('Relation "%s" not found in tables columns: %s.', relation, repr(headers))
            missing_headers.append(relation)

    new_headers = []
    present_headers = []
    for header in headers:
        if header == curr_entity:
            continue
        if header not in bdd_relations:
            self.log.info('Adding column "%s".', header)
            new_headers.append(header)
        elif header not in missing_headers:
            present_headers.append(header)

    return missing_headers, new_headers, present_headers
示例5: deleteMoState
def deleteMoState(self, urimo):
    """Delete every triple describing the module *urimo* from the modules graph on the TPS."""
    self.log.debug(' ***** Delete module ' + urimo + ' on TPS ***** ')
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    delete_query = (
        '\n'
        'DELETE WHERE { GRAPH <' + self.graph_modules + '> { <' + urimo + '> ?p ?o } }\n'
    )
    launcher.execute_query(builder.prepare_query(delete_query).query)
示例6: empty_database
def empty_database(self):
    """
    Delete all triples in the triplestore
    """
    self.log.debug("=== DELETE ALL TRIPLES ===")
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    launcher.execute_query(builder.get_delete_query_string().query)
示例7: test_print_ids
def test_print_ids(self):
    """Check that load_from_query_json renders the entity triple and the regex string filter."""
    from askomics.libaskomics.rdfdb.SparqlQueryBuilder import SparqlQueryBuilder
    dummy_request = testing.DummyRequest()
    builder = SparqlQueryBuilder(self.settings, dummy_request.session)
    graph = {
        'limit': 30,
        'return_only_query': False,
        'filter_cat': [],
        'constraint': [{'type': 'node',
                        'id': 'entity1',
                        'uri': 'http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#entity'}],
        'filter_str': [{'id': 'entity1', 'value': 'xxxx'}],
        'display': [{'id': 'entity1'}, {}],
        'export': 0,
        'filter_num': [],
        'uploaded': '',
    }
    generated_query = builder.load_from_query_json(graph).query
    self.assertIn('?entity1 a :entity .\n\tFILTER (regex(str(?entity1), "xxxx", "i")) .', generated_query)
示例8: list_named_graphs
def list_named_graphs(self):
    """Return the URIs of all named graphs in the triplestore.

    :return: list of graph URIs (strings)
    """
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # Iterate the SPARQL bindings directly instead of indexing with range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
示例9: launch_query
def launch_query(self):
    """ Converts the constraints table created by the graph to a sparql query, send it to the database and compile the results"""
    data = {}
    body = self.request.json_body
    export = bool(int(body['export']))
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    return_only_query = bool(int(body['return_only_query']))

    # Either reuse a query uploaded by the client, or build one from the JSON body.
    uploaded = body['uploaded']
    if uploaded == '':
        query = sqb.load_from_query_json(body).query
    elif export:
        # Exports get a much larger result window.
        query = uploaded.replace('LIMIT 30', 'LIMIT 10000')
    else:
        query = uploaded

    if return_only_query:
        data['query'] = query
        return data

    ql = QueryLauncher(self.settings, self.request.session)
    rb = ResultsBuilder(self.settings, self.request.session)
    results = ql.process_query(query)

    if export:
        data['file'] = ql.format_results_csv(rb.build_csv_table(results))
    else:
        entity_name_list, entity_list_attributes = rb.organize_attribute_and_entity(results, body['constraint'])
        data['results_entity_name'] = entity_name_list
        data['results_entity_attributes'] = entity_list_attributes
        prefix = self.settings["askomics.prefix"]
        # Strip the AskOmics prefix from every value before returning.
        data['results'] = [
            {key: row[key].replace(prefix, '') for key in row.keys()}
            for row in results
        ]

    self.log.debug("== results ==")
    for row in results:
        self.log.debug(row)
    return data
示例10: get_attributes_of
def get_attributes_of(self, uri):
    """
    Get all attributes of a node class (identified by his uri). These
    attributes are known thanks to the domain knowledge of your RDF database.

    :param uri: Uri of the node class
    :type uri: str
    :return: All attributes of a node class
    :rtype: Attribute list
    """
    self.log.debug(" =========== TripleStoreExplorer:get_attributes_of ===========")
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)

    fragment = uri.rsplit('#', 1)[-1]
    parent = fragment + str(self.get_new_id(fragment))

    # Ask the abstraction for every neighbor of the node class (from uri).
    template = self.get_template_sparql(self.ASKOMICS_neighbor_query_file)
    query = builder.load_from_file(template, {
        "nodeClass": '<%s>' % uri,
        "neighborClass": "?nodeUri",
    }).query

    attributes = []
    for result in launcher.process_query(query):
        neighbor_uri = result["relationUri"]
        neighbor_label = result["nodeLabel"] if 'nodeLabel' in result else result["relationLabel"]
        # NOTE: get_new_id must be called for every row, even when the
        # attribute is filtered out below, to keep id allocation identical.
        neighbor_id = neighbor_label + str(self.get_new_id(neighbor_label))
        if self.has_setting(result["nodeUri"], 'attribute') or self.has_setting(neighbor_uri, 'attribute'):
            attributes.append(Attribute(neighbor_id,
                                        neighbor_uri,
                                        result["nodeUri"],
                                        neighbor_label,
                                        parent))
    return attributes
示例11: delete_graph
def delete_graph(self):
    """
    Delete triples from a list of graph
    """
    self.log.debug("=== DELETE SELECTED GRAPHS ===")
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    for graph in self.request.json_body['namedGraphs']:
        self.log.debug("--- DELETE GRAPH : %s", graph)
        # Drop the graph itself, then remove the metadata triples describing it.
        launcher.execute_query(builder.get_drop_named_graph(graph).query)
        launcher.execute_query(builder.get_delete_metadatas_of_graph(graph).query)
示例12: moStateOnTPS
def moStateOnTPS(self):
    '''
    Check whether the state of each module file is saved on the TPS.

    Any local module file not yet known to the TPS is registered there
    with the 'off' (unchecked) state.

    :return: False when at least one module had to be registered (the
             caller should query again), otherwise the list of module
             state bindings with added 'checked' and 'wait' booleans.
    '''
    # Query the modules graph for every registered module and its state.
    sqb = SparqlQueryBuilder(self.settings, self.session)
    ql = QueryLauncher(self.settings, self.session)
    results = ql.process_query(sqb.build_query_on_the_fly({
        'select': '?uri ?module ?comment ?version ?graph ?state',
        'from' : [self.graph_modules],
        'query': '{\n'+
        '?uri rdfs:label ?module .\n'+
        '?uri rdfs:comment ?comment .\n'+
        '?uri :module_version ?version .\n'+
        '?uri :module_state ?state .\n'+
        'OPTIONAL { ?uri :module_graph ?graph . } \n'+
        '}\n'
    }, True).query)
    self.log.debug(' ***** module on TPS ***** ')
    listMoOnTps = {}
    for result in results:
        # Derive boolean flags from the textual state stored in the TPS.
        result['checked'] = (result['state'] == "ok")
        result['wait'] = (result['state'] == "wait")
        listMoOnTps[result['module']] = 0
        self.log.debug('module : '+result['module'])
        #=======================*************** TO REMOVE *********=======================================
        # debugging aid
        #if result['wait'] :
        #    result['wait'] = False
        #==============================================================
    self.log.debug(' ***** check Available Modules ***** ')
    requestAgain = False
    for mo in self.moduleFiles:
        self.log.debug(" --> module "+mo);
        if mo not in listMoOnTps:
            # Local module file unknown on the TPS: register it as 'off'
            # and signal the caller to run the query again.
            self.log.debug(" --====== > new module < =======");
            self.importMoSate(self.moduleFiles[mo],'off')
            requestAgain = True
    if requestAgain :
        return False
    return results
示例13: get_metadatas
def get_metadatas(self):
    """
    Create metadatas and insert them into AskOmics main graph.
    """
    self.log.debug("====== INSERT METADATAS ======")
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)

    subject = "<" + self.metadatas['graphName'] + "> "
    # One provenance triple per metadata entry, newline-separated.
    triples = [
        subject + 'prov:generatedAtTime "' + self.metadatas['loadDate'] + '"^^xsd:dateTime .',
        subject + 'dc:creator "' + self.metadatas['username'] + '"^^xsd:string .',
        subject + 'prov:wasDerivedFrom "' + self.metadatas['fileName'] + '"^^xsd:string .',
        subject + 'dc:hasVersion "' + self.metadatas['version'] + '"^^xsd:string .',
        subject + 'prov:describesService "' + self.metadatas['server'] + '"^^xsd:string .',
    ]
    ttl_metadatas = '\n'.join(triples)

    sparql_header = builder.header_sparql_config("")
    launcher.insert_data(ttl_metadatas, self.get_param("askomics.graph"), sparql_header)
示例14: get_list_named_graphs
def get_list_named_graphs(self):
    """
    Return a list with all the named graphs.

    :return: list of named-graph URIs (strings)
    """
    self.log.debug("=== LIST OF NAMED GRAPHS ===")
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # Iterate the bindings directly instead of indexing with range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
示例15: has_category
def has_category(self, entity, category, uri_category):
    """
    Get different categories for a node class.

    :param entity: Name of the entity associated with the node
    :param category: Name of the category
    :param uri_category: URI of the category class
    :return: List of categories
    :rtype: list
    """
    self.log.debug(" =========== TripleStoreExplorer:has_category ===========[ entity:%s, category:%s]" % (entity, category))
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    template = self.get_template_sparql(self.ASKOMICS_has_category_query_file)
    substitutions = {"#nodeClass#": uri_category, "#category#": category, "#entity#": entity}
    query = builder.load_from_file(template, substitutions).query
    labels = []
    for row in launcher.process_query(query):
        labels.append(row["label"])
    return labels