本文整理汇总了Python中rdflib.Graph.update方法的典型用法代码示例。如果您正苦于以下问题:Python Graph.update方法的具体用法?Python Graph.update怎么用?Python Graph.update使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类rdflib.Graph
的用法示例。
在下文中一共展示了Graph.update方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: RemoteStore
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
class RemoteStore():
    """Minimal facade over a remote SPARQL 1.1 endpoint.

    Wraps an rdflib SPARQLUpdateStore and exposes insert/delete of
    N-Triples payloads via SPARQL Update requests.
    """

    def __init__(self, endpoint):
        # The same URL is used for both the query and the update endpoint.
        backing = sparqlstore.SPARQLUpdateStore()
        backing.open((endpoint, endpoint))
        self._store = backing
        self.g = Graph(self._store, URIRef('urn:x-arq:DefaultGraph'))

    def update(self, triples_as_nt):
        """Insert the given N-Triples string into the remote default graph."""
        request = "INSERT DATA { %s }" % triples_as_nt
        return self.g.update(request)

    def delete(self, triples_as_nt):
        """Remove the given N-Triples string from the remote default graph."""
        request = "DELETE DATA { %s }" % triples_as_nt
        return self.g.update(request)
示例2: test_issue579
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
def test_issue579():
    """Regression test for rdflib issue #579.

    A prefix bound with Graph.bind() must be usable from
    Graph.update(), not only from Graph.query().
    """
    graph = Graph()
    graph.bind('foaf', FOAF)
    myns = Namespace("http://myname/")
    graph.add((myns.bob, FOAF.name, Literal('bb')))
    # The bound prefix resolves fine in a SELECT query.
    assert len(graph.query("select ?n where { ?n foaf:name 'bb' . }")) == 1
    # A non-matching DELETE must leave the graph unchanged...
    graph.update("delete where { ?e foaf:name 'ss' .}")
    assert len(graph) == 1
    # ...and a matching DELETE must remove the triple.
    graph.update("delete where { ?e foaf:name 'bb' .}")
    assert len(graph) == 0
示例3: Store
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
class Store():
    '''
    This class is a wrapper for the Graph class that
    handles ontology binding and triples serialization.
    '''

    def __init__(self, endpoint=None):
        """Create an in-memory graph, or one backed by a SPARQL endpoint.

        endpoint: URL of a SPARQL 1.1 query/update service; when None the
        store is a plain local rdflib Graph.
        """
        if endpoint is None:
            self.g = Graph()
        else:
            self._store = sparqlstore.SPARQLUpdateStore()
            # Same URL serves as both the query and the update endpoint.
            self._store.open((endpoint, endpoint))
            self.g = Graph(self._store, URIRef('urn:x-arq:DefaultGraph'))
        # prefix -> Namespace mapping of everything bound on this store
        self.ns = {}

    def bind_namespaces(self, namespaces):
        """Bind a {prefix: namespace-uri} dict on the graph and remember it."""
        for prefix in namespaces:
            # Build the Namespace once and reuse it for both bind and cache
            # (the original constructed it twice per entry).
            namespace = Namespace(namespaces[prefix])
            self.g.bind(prefix, namespace)
            self.ns[prefix] = namespace

    def get_namespaces(self):
        """Return all (prefix, namespace) pairs bound on the graph."""
        return list(self.g.namespaces())

    def get_resource(self, urn):
        """Return the rdflib Resource wrapper for the given URI."""
        return self.g.resource(urn)

    def add_triple(self, s, v, p):
        """Add one (subject, predicate, object) triple to the graph."""
        self.g.add((s, v, p))

    def serialize(self, format):
        """Serialize the graph in the given rdflib format (e.g. 'nt', 'xml')."""
        return self.g.serialize(format=format)

    def update(self):
        """Push the whole graph to the backing store via INSERT DATA.

        NOTE(review): the N-Triples payload is spliced into the SPARQL
        string unescaped; safe only because it comes from our own
        serializer, not from untrusted input.
        """
        return self.g.update(
            "INSERT DATA { %s }" % self.g.serialize(format='nt'))
示例4: buscar_vuelos
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
#.........这里部分代码省略.........
gresp.add((vlo_obj_go, myns_atr.airportSalida, Literal(Goairname)))
gresp.add((vlo_obj_go, myns_atr.airportLlegada, Literal(Goairllname)))
gresp.add((vlo_obj_go, myns_atr.dura, Literal(durationGo)))
# Fecha y hora de salida y aterrizaje de la ida
horaGoSale = trip['slice'][0]['segment'][0]['leg'][0]['departureTime']
horaGoLlega = trip['slice'][0]['segment'][0]['leg'][0]['arrivalTime']
gresp.add((vlo_obj_go, myns_atr.hora_sale, Literal(horaGoSale)))
gresp.add((vlo_obj_go, myns_atr.hora_llega, Literal(horaGoLlega)))
# Terminal y ciudad de salida de la ida
terminalGoSale = "unknown"
if 'originTerminal' in trip['slice'][0]['segment'][0]['leg'][0]:
terminalGoSale = trip['slice'][0]['segment'][0]['leg'][0]['originTerminal']
terminalGoLlega = "unknown"
if 'destinationTerminal' in trip['slice'][0]['segment'][0]['leg'][0]:
terminalGoLlega = trip['slice'][0]['segment'][0]['leg'][0]['destinationTerminal']
gresp.add((vlo_obj_go, myns_atr.terminal_sale, Literal(terminalGoSale)))
gresp.add((vlo_obj_go, myns_atr.terminal_llega, Literal(terminalGoLlega)))
# Direccion de la ida (redundante)
ciudadGoSale = trip['slice'][0]['segment'][0]['leg'][0]['origin']
ciudadGoLlega = trip['slice'][0]['segment'][0]['leg'][0]['destination']
gresp.add((vlo_obj_go, myns_atr.ciudad_sale, Literal(ciudadGoSale)))
gresp.add((vlo_obj_go, myns_atr.ciudad_llega, Literal(ciudadGoLlega)))
# DATOS VUELTA
# Id unico para la vuelta del roundtrip
idBack = trip['slice'][1]['segment'][0]['flight']['number'] +trip['slice'][1]['segment'][0]['flight']['carrier']
vlo_obj_back = myns_vlo[idBack]
# El roundtrip tiene esta vuelta
gresp.add((rndtrip_obj, myns_atr.vuelta, vlo_obj_back))
originid = trip['slice'][1]['segment'][0]['leg'][0]['origin']
#este puede ser code
Gonameid = [x['code'] for x in dic['trips']['data']['airport']].index(originid)
Goairname = dic['trips']['data']['airport'][Gonameid]['name']
destinationid = trip['slice'][1]['segment'][0]['leg'][0]['destination']
#este puede ser code
Gonameid = [x['code'] for x in dic['trips']['data']['airport']].index(destinationid)
Goairllname = dic['trips']['data']['airport'][Gonameid]['name']
gresp.add((vlo_obj_back, myns_atr.airportSalida, Literal(Goairname)))
gresp.add((vlo_obj_back, myns_atr.airportLlegada, Literal(Goairllname)))
# Cuanto dura esta vuelta
durationBack = trip['slice'][1]['duration']
gresp.add((vlo_obj_back, myns_atr.dura, Literal(durationBack)))
# Fecha y hora de salida y aterrizaje de la vuelta
horaBackSale = trip['slice'][1]['segment'][0]['leg'][0]['departureTime']
horaBackLlega = trip['slice'][1]['segment'][0]['leg'][0]['arrivalTime']
gresp.add((vlo_obj_back, myns_atr.hora_sale, Literal(horaBackSale)))
gresp.add((vlo_obj_back, myns_atr.hora_llega, Literal(horaBackLlega)))
# Terminal y ciudad de salida de la vuelt
terminalBackSale = "unknown"
if 'originTerminal' in trip['slice'][1]['segment'][0]['leg'][0]:
terminalBackSale = trip['slice'][1]['segment'][0]['leg'][0]['originTerminal']
terminalBackLlega = "unknown"
if 'destinationTerminal' in trip['slice'][1]['segment'][0]['leg'][0]:
terminalBackLlega = trip['slice'][1]['segment'][0]['leg'][0]['destinationTerminal']
gresp.add((vlo_obj_back, myns_atr.terminal_sale, Literal(terminalBackSale)))
gresp.add((vlo_obj_back, myns_atr.terminal_llega, Literal(terminalBackLlega)))
# Direccion de la vuelta (redundante)
ciudadBackSale = trip['slice'][1]['segment'][0]['leg'][0]['origin']
ciudadBackLlega = trip['slice'][1]['segment'][0]['leg'][0]['destination']
gresp.add((vlo_obj_back, myns_atr.ciudad_sale, Literal(ciudadBackSale)))
gresp.add((vlo_obj_back, myns_atr.ciudad_llega, Literal(ciudadBackLlega)))
endpoint = 'http://localhost:5820/flight/query'
store = sparqlstore.SPARQLUpdateStore()
store.open((endpoint, endpoint))
default_graph = URIRef('http://example.org/default-graph')
ng = Graph(store, identifier=default_graph)
ng = ng.update(u'INSERT DATA { %s }' % gresp.serialize(format='nt'))
gresp.serialize('f.rdf')
else:
print "AgentFlightsGoogle => We read from cache"
# print "GRAFO DE RESPUESTA"
# for s, p, o in gresp:
# print 's: ' + s
# print 'p: ' + p
# print 'o: ' + o
# print '\n'
endpoint = 'http://localhost:5820/flight/query'
store = sparqlstore.SPARQLUpdateStore()
store.open((endpoint, endpoint))
default_graph = URIRef('http://example.org/default-graph')
ng = Graph(store, identifier=default_graph)
gresp = ng
#gresp.parse('f.rdf' ,format='xml')
print "repuesta"
return gresp
示例5: Graph
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
# test for https://github.com/RDFLib/rdflib/issues/579
# (script form: prefixes bound via Graph.bind() must work in update()
# exactly as they do in query())
from rdflib import Graph, URIRef, Literal, Namespace
from rdflib.namespace import FOAF, RDF

g = Graph()
g.bind("foaf", FOAF)
n = Namespace("http://myname/")
g.add((n.bob, FOAF.name, Literal("bb")))

# The bound foaf prefix is usable in a SELECT.
assert len(g.query("select ?n where { ?n foaf:name 'bb' . }")) == 1

# DELETE with no matching triple: graph size must stay at 1.
g.update("delete where { ?e foaf:name 'ss' .}")
assert len(g) == 1

# DELETE with a matching triple: graph must become empty.
g.update("delete where { ?e foaf:name 'bb' .}")
assert len(g) == 0
示例6: buscar_hoteles
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
#.........这里部分代码省略.........
#comparar tiempo para acceder cache
# print LOG_TAG+"checking cache"
# tDelta = datetime.datetime.now() - requestTime
# days, seconds = tDelta.days, tDelta.seconds
# hours = days * 24 + seconds // 3600
# minutes = (seconds % 3600) // 60
# seconds = seconds % 60
# print LOG_TAG+"resolving timestamp"
# b = (minutes < CACHE_TIME_CONST)
b = cache
if b == False:
print "AgentHotel => We make a new service request; cant rely on cache"
geolocator = Nominatim()
location = geolocator.geocode(destinationCity + ", " + destinationCountry,timeout=10)
print ((location.latitude, location.longitude))
r = requests.get(EAN_END_POINT,
params={'cid': EAN_DEV_CID,
'minorRev': minorRev,
'apiKey': EAN_KEY,
'sig': sig,
'locale': 'es_ES',
'currencyCode': 'EUR',
'numberOfResults': 50,
'latitude': location.latitude,
'longitude': location.longitude,
'searchRadius': searchRadius,
'searchRadiusUnit': "KM",
'arrivalDate': arrivaldepDStr,
'departureDate': departuredepDStr,
'numberOfAdults': numberOfAdults,
'numberOfChildren': numberOfChildren,
'propertyCategory': propertyCategory
})
#print r.text
dic = r.json()
out_file = open("h.json","w")
# Save the dictionary into this file
# (the 'indent=4' is optional, but makes it more readable)
json.dump(dic,out_file, indent=4)
#print json.dumps(dic, indent=4, sort_keys=True)
# Hago bind de las ontologias que usaremos en el grafo
gresp.bind('myns_pet', myns_pet)
gresp.bind('myns_atr', myns_atr)
gresp.bind('myns_hot', myns_hot)
if 'EanWsError' in dic['HotelListResponse']:
print ('Error de tipo ' + dic['HotelListResponse']['EanWsError']['category'],
' => ' + dic['HotelListResponse']['EanWsError']['verboseMessage'])
#gresp = build_message(Graph(), ACL['not-understood'], sender=AgentHotel.uri)
else:
print len(dic['HotelListResponse']['HotelList']['HotelSummary'])
for hot in dic['HotelListResponse']['HotelList']['HotelSummary']:
# print ("Hotel " + hot['name'],
# ", distancia del centro: " + '{:.2f}'.format(hot['proximityDistance']),
# ' ' + hot['proximityUnit'] + ', precio total: ',
# hot['RoomRateDetailsList']['RoomRateDetails']['RateInfos']['RateInfo']['ChargeableRateInfo']['@total'],
# ', rating: ' + '{:.1f}'.format(hot['hotelRating']),
# ', tripAdvisorRating: ' + '{:.1f}'.format(hot['tripAdvisorRating']),
# ' tripAdvisorReviewCount: ' + '{:.0f}'.format(hot['tripAdvisorReviewCount'])
# )
hotel = hot['hotelId']
hot_obj = myns_hot[hotel]
gresp.add((hot_obj, myns_atr.esUn, myns.hotel))
gresp.add((hot_obj, myns_atr.ciudad, Literal(hot['city'])))
gresp.add((hot_obj, myns_atr.codigoPostal, Literal(hot['postalCode'])))
gresp.add((hot_obj, myns_atr.descripcionDeHabitacion, Literal(hot['RoomRateDetailsList']['RoomRateDetails']['roomDescription'])))
gresp.add((hot_obj, myns_atr.adresa, Literal(hot['address1'])))
gresp.add((hot_obj, myns_atr.nombre, Literal(hot['name'])))
gresp.add((hot_obj, myns_atr.descriptionCorta, Literal(hot['shortDescription'])))
gresp.add((hot_obj, myns_atr.distanciaRepectoAlCentro, Literal(hot['proximityDistance'])))
gresp.add((hot_obj, myns_atr.distanciaRepectoAlCentro_unidad, Literal(hot['proximityUnit'])))
gresp.add((hot_obj, myns_atr.cuesta, Literal(hot['RoomRateDetailsList']['RoomRateDetails']['RateInfos']['RateInfo']['ChargeableRateInfo']['@total'])))
gresp.add((hot_obj, myns_atr.rating, Literal(hot['hotelRating'])))
gresp.add((hot_obj, myns_atr.tripAdvisorRating, Literal(hot['tripAdvisorRating'])))
gresp.add((hot_obj, myns_atr.tripAdvisorReviewCount, Literal(hot['tripAdvisorReviewCount'])))
endpoint = 'http://localhost:5820/hotel/query'
store = sparqlstore.SPARQLUpdateStore()
store.open((endpoint, endpoint))
default_graph = URIRef('http://example.org/default-graph')
ng = Graph(store, identifier=default_graph)
ng = ng.update(u'INSERT DATA { %s }' % gresp.serialize(format='nt'))
gresp.serialize('h.rdf')
else:
print "AgentHotel => We read from cache"
endpoint = 'http://localhost:5820/hotel/query'
store = sparqlstore.SPARQLUpdateStore()
store.open((endpoint, endpoint))
default_graph = URIRef('http://example.org/default-graph')
ng = Graph(store, identifier=default_graph)
gresp = ng
#gresp.parse('h.rdf' ,format='xml')
print "retornar repuesta"
return gresp
示例7: Test
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
class Test(unittest.TestCase):
    """Tests for the UKEOF SPARQL INSERT queries that enrich per-row
    graphs built from the EXAMPLE CSV fixture.

    Uses assertEqual throughout: the assertEquals alias used by the
    original is deprecated and removed in Python 3.12.
    """

    def setUp(self):
        # Aggregate one rdflib graph per CSV row into a single graph.
        self.g = Graph()
        for rg in row_graphs_from_file(StringIO.StringIO(EXAMPLE)):
            self.g += rg
        self.len_before_update = len(self.g)
        # Parallel plain-CSV view of the same fixture, for expected values.
        self.csv = csv.DictReader(StringIO.StringIO(EXAMPLE))

    def assertLastUpdateAdded(self, n):
        """Assert that the most recent _update() added exactly n triples."""
        self.assertEqual(n, len(self.g) - self.len_before_update)

    def _update(self, query):
        """Expand, parse and run a SPARQL update, recording the prior size."""
        self.len_before_update = len(self.g)
        self.g.update(ukeof.expand_and_parse(query)())

    def testInsertType(self):
        self._update(ukeof.INSERT_TYPE)
        self.assertLastUpdateAdded(14)
        # Every typed activity must be traceable back to its CSV field value.
        for activity_uri in self.g[: RDF.type : SEPAKE.UKEOFActivity]:
            self.assertIn(Literal(activity_uri),
                          self.g[activity_uri : PROV.wasInfluencedBy / RDFS.member / CSV.fieldValue])

    def testInsertLabel(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_LABEL)
        self.assertLastUpdateAdded(7)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity':
                self.assertEqual(csv_row['Title'],
                                 self.g.value(uri(csv_row), RDFS.label).value)

    def testInsertHomepage(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_HOMEPAGE)
        self.assertLastUpdateAdded(7)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity':
                self.assertEqual(self.g.value(uri(csv_row), FOAF.homepage),
                                 uri(csv_row))

    def testInsertLeadorg(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_LEAD_ORG)
        self.assertLastUpdateAdded(28)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity':
                lead = self.g.value(predicate = SEPAKE.owns, object = uri(csv_row))
                self.assertEqual(SEPAKE.UKEOFOrganisation,
                                 self.g.value(lead, RDF.type))
                self.assertEqual(csv_row['Lead organisation'],
                                 self.g.value(lead, RDFS.label).value)

    def testInsertComment(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_COMMENT)
        self.assertLastUpdateAdded(7)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity':
                desc = self.g.value(uri(csv_row), SEPAKE.htmlDescription)
                # The HTML description must embed all three source fields.
                for key in ['Description', 'Objectives', 'Reasons for collection']:
                    self.assertGreater(desc.find(csv_row[key]), -1, 'Failed to find %s="%s" in "%s"' % (key, csv_row[key], desc))

    def testInsertStartDate(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_START_DATE)
        self.assertLastUpdateAdded(7)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity' and len(csv_row['Lifespan start']) > 0:
                self.assertEqual(datetime.datetime.strptime(csv_row['Lifespan start'], '%Y-%m-%d').date(),
                                 self.g.value(URIRef(csv_row['Link to full record']), PROV.startedAtTime).value
                                 )

    def testInsertEndDate(self):
        self._update(ukeof.INSERT_TYPE)
        self._update(ukeof.INSERT_END_DATE)
        self.assertLastUpdateAdded(7)
        for csv_row in self.csv:
            if csv_row['Type'] == 'Activity' and len(csv_row['Lifespan end']) > 0:
                self.assertEqual(datetime.datetime.strptime(csv_row['Lifespan end'], '%Y-%m-%d').date(),
                                 self.g.value(URIRef(csv_row['Link to full record']), PROV.endedAtTime).value
                                 )
示例8: buscar_actividades
# 需要导入模块: from rdflib import Graph [as 别名]
# 或者: from rdflib.Graph import update [as 别名]
def buscar_actividades(destinationCity="Barcelona", destinationCountry="Spain", radius=20000, types=["museum"], cache = True):
    """Search activities (Google Places) near a city and return them as an RDF graph.

    When cache is False a fresh Google Places request is made, the raw JSON
    is dumped to a.json, the triples are merged into a.rdf and pushed to a
    local Stardog SPARQL endpoint; otherwise results matching the location
    and requested type are loaded back from a.rdf.

    NOTE(review): ``types=["museum"]`` is a mutable default argument — the
    list is shared across calls; mutating it in a caller would leak.
    """
    location= destinationCity+", "+destinationCountry
    # Graph where we return the result.
    gr = Graph()
    b = cache
    print location
    if b == False:
        print "INFO AgenteActividades => Recibo peticion de actividades."
        print "AgenteActividades => We make a new service request; cant rely on cache"
        google_places = GooglePlaces(GOOGLEAPI_KEY)
        # You may prefer to use the text_search API, instead.
        query_result = google_places.nearby_search(location=location, radius=radius, types=types)
        out_file = open("a.json","w")
        # Save the dictionary into this file
        # (the 'indent=4' is optional, but makes it more readable)
        json.dump(query_result.raw_response,out_file, indent=4)
        # Save the dictionary into this file
        # (the 'indent=4' is optional, but makes it more readable)
        print LOG_TAG + " => built query"
        if query_result.has_attributions:
            print query_result.html_attributions
        print LOG_TAG + " => about to build response Graph"
        # Graph where we return the result.
        gr = Graph()
        # Bind the ontologies we will use in the graph.
        gr.bind('myns_pet', myns_pet)
        gr.bind('myns_atr', myns_atr)
        gr.bind('myns_act', myns_act)
        # TODO: add the activity type so the graph can be traversed by type.
        print len(query_result.places)
        for place in query_result.places:
            # Unique identifier for each activity.
            plc_obj = myns_act[place.place_id]
            # Record the name and location of the activity.
            gr.add((plc_obj, myns_atr.esUn, myns.actividad))
            gr.add((plc_obj, myns_atr.tipo, Literal(types[0])))
            gr.add((plc_obj, myns_atr.nombre, Literal(place.name)))
            gr.add((plc_obj, myns_atr.localizacion, Literal(place.geo_location)))
            # Another API call to fetch the remaining details.
            place.get_details()
            if place.rating:
                gr.add((plc_obj, myns_atr.rating, Literal(place.rating)))
            else :
                # No rating available: store 0 as a neutral default.
                gr.add((plc_obj, myns_atr.rating, Literal(0)))
            gr.add((plc_obj, myns_atr.direccion, Literal(place.formatted_address)))
            gr.add((plc_obj, myns_atr.Descripcion, Literal(place.details)))
            gr.add((plc_obj, myns_atr.paisciudad, Literal(location)))
            gr.add((plc_obj, myns_atr.googleUrl, Literal(place.url)))
            gr.add((plc_obj, myns_atr.website, Literal(place.website)))
            gr.add((plc_obj, myns_atr.tel_int, Literal(place.international_phone_number)))
            # VERBOSE
            # In case we want more details in the future:
            #pprint.pprint(place.details) # A dict matching the JSON response from Google.
            #print place.local_phone_number
        # Merge the new triples into the on-disk cache file a.rdf.
        guax = Graph()
        guax.parse('a.rdf' ,format='xml')
        guax += gr
        guax.serialize('a.rdf')
        endpoint = 'http://localhost:5820/actividad/query'
        store = sparqlstore.SPARQLUpdateStore()
        store.open((endpoint, endpoint))
        default_graph = URIRef('http://example.org/default-graph')
        ng = Graph(store, identifier=default_graph)
        # NOTE(review): Graph.update() returns None, so this rebinds ng to
        # None; harmless here since ng is not used afterwards, but confusing.
        ng = ng.update(u'INSERT DATA { %s }' % gr.serialize(format='nt'))
    else:
        gaux2 = Graph()
        print "AgenteActividades => We read from cache"
        gaux2.parse('a.rdf' ,format='xml')
        # NOTE(review): triples() returns a generator whose result is
        # discarded here — this line is a no-op; the real filtering happens
        # in the loop below.
        gaux2.triples((None, myns_atr.paisciudad, Literal(location)))
        # Collect the subjects whose 'tipo' matches the first requested type.
        lisy = []
        for a,b,c in gaux2:
            if gaux2.value(subject= a, predicate= myns_atr.tipo) == Literal(types[0]):
                lisy.append(a)
        # Copy every triple of each matching subject into the result graph.
        for a in lisy:
            gr += gaux2.triples((a, None, None))
        endpoint = 'http://localhost:5820/actividad/query'
        store = sparqlstore.SPARQLUpdateStore()
        store.open((endpoint, endpoint))
        # ......... remainder of the function omitted in this excerpt .........