This article collects typical usage examples of the Python method stetl.postgis.PostGIS.commit. If you are wondering what PostGIS.commit does, how to call it, or where to find real-world usage, the hand-picked code examples below should help. You can also explore the containing class, stetl.postgis.PostGIS, for further context.
Five code examples of PostGIS.commit are shown below, ordered by popularity by default.
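All five examples share the same calling pattern: build a PostGIS wrapper from the component's config dict, connect(), execute() one or more SQL statements, then commit() — passing close=False when the connection should stay open for further statements. Below is a minimal sketch of that pattern; the connection parameters are placeholders I chose for illustration, not values taken from the examples (in Stetl they normally come from the ETL config via self.cfg.get_dict()).

# Minimal sketch of the shared pattern; connection parameters are assumptions.
from stetl.postgis import PostGIS

config = {'host': 'localhost', 'port': '5432', 'database': 'gis',
          'user': 'postgres', 'password': 'postgres', 'schema': 'public'}

db = PostGIS(config)
db.connect()
db.execute("INSERT INTO mytable (name) VALUES (%s)", ('example',))
db.commit(close=False)   # keep the connection open for more statements
db.disconnect()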
Example 1: PostgresInsertOutput
# Required import: from stetl.postgis import PostGIS
# Or import the method directly: from stetl.postgis.PostGIS import commit
class PostgresInsertOutput(PostgresDbOutput):
    """
    Output by inserting a single record in a Postgres database table.
    Input is a Stetl record (Python dict structure) or a list of records.
    Creates an INSERT for Postgres to insert each single record.
    When the "replace" parameter is True, any existing record keyed by "key" is
    attempted to be UPDATEd first.

    NB a constraint is that the first and each subsequent record needs to contain
    all values, as an INSERT and UPDATE query template is built once for the columns
    in the first record.

    consumes=[FORMAT.record_array, FORMAT.record]
    """

    # Start attribute config meta
    @Config(ptype=str, required=False, default='public')
    def table(self):
        """
        Table for inserts.
        """
        pass

    @Config(ptype=bool, required=False, default=False)
    def replace(self):
        """
        Replace record if exists?
        """
        pass

    @Config(ptype=str, required=False, default=None)
    def key(self):
        """
        The key column name of the table, required when replacing records.
        """
        pass
    # End attribute config meta

    def __init__(self, configdict, section, consumes=FORMAT.record):
        DbOutput.__init__(self, configdict, section, consumes=[FORMAT.record_array, FORMAT.record])
        self.query = None
        self.update_query = None
        self.db = None

    def init(self):
        # Connect only once to DB
        log.info('Init: connect to DB')
        self.db = PostGIS(self.cfg.get_dict())
        self.db.connect()

    def exit(self):
        # Disconnect from DB when done
        log.info('Exit: disconnect from DB')
        self.db.disconnect()

    def create_query(self, record):
        # We assume that all records do the same INSERT key/values
        # See http://grokbase.com/t/postgresql/psycopg/12735bvkmv/insert-into-with-a-dictionary-or-generally-with-a-variable-number-of-columns
        # e.g. INSERT INTO lml_files ("file_name", "file_data") VALUES (%s,%s)
        query = "INSERT INTO %s (%s) VALUES (%s)" % (
            self.cfg.get('table'), ",".join(['%s' % k for k in record]), ",".join(["%s", ] * len(record.keys())))
        log.info('query is %s', query)
        return query

    def create_update_query(self, record):
        # We assume that all records do the same UPDATE key/values
        # https://stackoverflow.com/questions/1109061/insert-on-duplicate-update-in-postgresql/6527838#6527838
        # e.g. UPDATE table SET field='C', field2='Z' WHERE id=3;
        query = "UPDATE %s SET (%s) = (%s) WHERE %s = %s" % (
            self.cfg.get('table'), ",".join(['%s ' % k for k in record]), ",".join(["%s", ] * len(record.keys())), self.key, "%s")
        log.info('update query is %s', query)
        return query

    def insert(self, record):
        res = 0
        if self.replace and self.key and self.key in record:
            # Replace option: try UPDATE if existing
            # https://stackoverflow.com/questions/1109061/insert-on-duplicate-update-in-postgresql/6527838#6527838
            values = record.values()
            values.append(record[self.key])
            res = self.db.execute(self.update_query, values)

            # del_query = "DELETE FROM %s WHERE %s = '%s'" % (self.cfg.get('table'), self.key, record[self.key])
            # res = self.db.execute(del_query)

        if res < 1:
            # Do insert with values from the record dict
            # only if we did not do an UPDATE (res==0) on existing record.
            self.db.execute(self.query, record.values())
        self.db.commit(close=False)

    def write(self, packet):
        # Deal with empty or zero-length data structures (list or dict)
        if packet.data is None or len(packet.data) == 0:
            return packet

        # ASSERT: record data present
        # record is Python dict (single record) or list of Python dict (multiple records)
        # ......... remainder of this method omitted .........
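To make the query templating above concrete, here is a standalone illustration (plain Python, no Stetl required) of the strings create_query() and create_update_query() would roughly produce; the table and column names are made up for this sketch.

# Illustration only: table/column names are assumptions.
record = {'id': 1, 'name': 'station-1'}
table, key = 'measurements', 'id'

insert_query = "INSERT INTO %s (%s) VALUES (%s)" % (
    table, ",".join(record.keys()), ",".join(["%s"] * len(record)))
print(insert_query)
# INSERT INTO measurements (id,name) VALUES (%s,%s)

update_query = "UPDATE %s SET (%s) = (%s) WHERE %s = %s" % (
    table, ",".join(record.keys()), ",".join(["%s"] * len(record)), key, "%s")
print(update_query)
# UPDATE measurements SET (id,name) = (%s,%s) WHERE id = %s

# The actual values are then passed separately, e.g.
# db.execute(insert_query, list(record.values()))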
Example 2: write
# Required import: from stetl.postgis import PostGIS
# Or import the method directly: from stetl.postgis.PostGIS import commit
def write(self, packet):
    if packet.data is None:
        return packet

    gml_doc = packet.data
    log.info('inserting features in DB')
    db = PostGIS(self.cfg.get_dict())
    db.connect()

    # print self.to_string(gml_doc, False, False)
    # NS = {'base': 'urn:x-inspire:specification:gmlas:BaseTypes:3.2', 'gml': 'http://www.opengis.net/gml/3.2'}
    # featureMembers = gml_doc.xpath('//base:member/*', namespaces=NS)
    featureMembers = gml_doc.xpath("//*[local-name() = '%s']/*" % self.feature_member_tag)

    count = 0
    gml_ns = None
    for childNode in featureMembers:
        if gml_ns is None:
            if childNode.nsmap.has_key('gml'):
                gml_ns = childNode.nsmap['gml']
            else:
                if childNode.nsmap.has_key('GML'):
                    gml_ns = childNode.nsmap['GML']

        gml_id = childNode.get('{%s}id' % gml_ns)
        feature_type_id = self.feature_type_ids[childNode.tag]

        # Find a GML geometry in the GML NS
        ogrGeomWKT = None
        # gmlMembers = childNode.xpath(".//gml:Point|.//gml:Curve|.//gml:Surface|.//gml:MultiSurface", namespaces=NS)
        gmlMembers = childNode.xpath(
            ".//*[local-name() = 'Point']|.//*[local-name() = 'Polygon']|.//*[local-name() = 'Curve']|.//*[local-name() = 'Surface']|.//*[local-name() = 'MultiSurface']")
        geom_str = None
        for gmlMember in gmlMembers:
            if geom_str is None:
                geom_str = etree.tostring(gmlMember)
            # no need for GDAL Python bindings for now, maybe when we'll optimize with COPY iso INSERT
            # ogrGeom = ogr.CreateGeometryFromGML(str(gmlStr))
            # if ogrGeom is not None:
            #     ogrGeomWKT = ogrGeom.ExportToWkt()
            #     if ogrGeomWKT is not None:
            #         break

        blob = etree.tostring(childNode, pretty_print=False, xml_declaration=False, encoding='UTF-8')

        if geom_str is None:
            sql = "INSERT INTO gml_objects(gml_id, ft_type, binary_object) VALUES (%s, %s, %s)"
            parameters = (gml_id, feature_type_id, db.make_bytea(blob))
        else:
            # ST_SetSRID(ST_GeomFromGML(%s)),-1)
            sql = "INSERT INTO gml_objects(gml_id, ft_type, binary_object, gml_bounded_by) VALUES (%s, %s, %s, ST_SetSRID( ST_GeomFromGML(%s),%s) )"
            parameters = (gml_id, feature_type_id, db.make_bytea(blob), geom_str, self.srid)

        if db.execute(sql, parameters) == -1:
            log.error("feat num# = %d error inserting feature blob=%s (but continuing)" % (count, blob))
            # will fail but we will close connection also
            db.commit()

            # proceed...
            log.info('retrying to proceed with remaining features...')
            db = PostGIS(self.cfg.get_dict())
            db.connect()
            count = 0

        count += 1

    exception = db.commit()
    if exception is not None:
        log.error("error in commit")

    log.info("inserted %s features" % count)
    return packet
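The namespace handling in this example (the local-name() XPath expressions and the gml/GML nsmap lookup) works without registering namespaces up front. Here is a small self-contained lxml sketch of the same trick; the GML fragment and tag names are made up for illustration.

# Standalone lxml sketch of the namespace-agnostic XPath used above.
from lxml import etree

gml = b"""<wfs:FeatureCollection xmlns:wfs="http://www.opengis.net/wfs/2.0"
                                 xmlns:gml="http://www.opengis.net/gml/3.2">
  <wfs:member>
    <app:Road xmlns:app="http://example.org/app" gml:id="road.1">
      <gml:Point gml:id="pt.1"><gml:pos>5.1 52.0</gml:pos></gml:Point>
    </app:Road>
  </wfs:member>
</wfs:FeatureCollection>"""

doc = etree.fromstring(gml)
for feature in doc.xpath("//*[local-name() = 'member']/*"):
    gml_ns = feature.nsmap.get('gml') or feature.nsmap.get('GML')  # Python 3: nsmap.get() instead of has_key()
    print(feature.get('{%s}id' % gml_ns))                          # -> road.1
    geoms = feature.xpath(".//*[local-name() = 'Point']|.//*[local-name() = 'MultiSurface']")
    print(etree.tostring(geoms[0]))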
Example 3: WeewxDbInput
# Required import: from stetl.postgis import PostGIS
# Or import the method directly: from stetl.postgis.PostGIS import commit
class WeewxDbInput(SqliteDbInput):
    """
    Reads weewx raw archive records from SQLite.
    """
    def __init__(self, configdict, section):
        SqliteDbInput.__init__(self, configdict, section)

        self.progress_query = self.cfg.get('progress_query')
        self.progress_update = self.cfg.get('progress_update')

        # Connect only once to DB
        log.info('Init: connect to Postgres DB')
        self.progress_db = PostGIS(self.cfg.get_dict())
        self.progress_db.connect()

    def exit(self):
        # Disconnect from DB when done
        log.info('Exit: disconnect from DB')
        self.progress_db.disconnect()

    def after_chain_invoke(self, packet):
        """
        Called right after entire Component Chain invoke.
        Used to update last id of processed file record.
        """
        # last_datetime.datetime.fromtimestamp(self.last_id).strftime('%Y-%m-%d %H:%M:%S')
        ts_local = time.strftime("%Y-%m-%d %H:%M:%S %Z", time.localtime(self.last_id))

        log.info('Updating progress table ts_unix=%d ts_local=%s' % (self.last_id, ts_local))
        self.progress_db.execute(self.progress_update % (self.last_id, ts_local))
        self.progress_db.commit(close=False)
        log.info('Update progress table ok')
        return True

    def read(self, packet):
        # Get last processed id of archive table
        self.progress_db.execute(self.progress_query)
        progress_rec = self.progress_db.cursor.fetchone()
        self.last_id = progress_rec[3]
        log.info('progress record: %s' % str(progress_rec))

        # Fetch next batch of archive records
        archive_recs = self.do_query(self.query % self.last_id)
        log.info('read archive_recs: %d' % len(archive_recs))

        # No more records to process?
        if len(archive_recs) == 0:
            packet.set_end_of_stream()
            log.info('Nothing to do. All file_records done')
            return packet

        # Remember last id processed for next query
        self.last_id = archive_recs[len(archive_recs) - 1].get('dateTime')

        packet.data = archive_recs

        # Always stop after batch, otherwise we would continue forever
        packet.set_end_of_stream()
        return packet
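The progress-table pattern above (read the last processed id, fetch the next batch, write the new id back and commit) can be sketched on its own. The table and column names below are assumptions, since the real queries come from the progress_query and progress_update config parameters.

# Hedged sketch of the progress-table resume pattern; table/column names and
# connection parameters are assumptions, not taken from the example.
from stetl.postgis import PostGIS

config = {'host': 'localhost', 'database': 'gis', 'user': 'postgres', 'password': 'postgres'}
progress_query = "SELECT id, name, comment, last_id FROM weewx_progress WHERE name = 'archive'"
progress_update = "UPDATE weewx_progress SET last_id = %d, last_time = '%s' WHERE name = 'archive'"

db = PostGIS(config)
db.connect()

db.execute(progress_query)
last_id = db.cursor.fetchone()[3]   # 4th column holds the last processed id

# ... fetch and process archive records with dateTime > last_id ...
new_last_id = last_id + 300         # e.g. the dateTime of the last record processed

db.execute(progress_update % (new_last_id, '2020-01-01 12:05:00 UTC'))
db.commit(close=False)              # keep the connection open for the next batch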
Example 4: DeegreeBlobstoreInput
# Required import: from stetl.postgis import PostGIS
# Or import the method directly: from stetl.postgis.PostGIS import commit
class DeegreeBlobstoreInput(Input):
    """
    Read features from deegree Blobstore DB into an etree doc.

    produces=FORMAT.etree_doc
    """

    # Start attribute config meta
    @Config(ptype=int, required=False, default=10000)
    def max_features_per_doc(self):
        """
        Max features to read from input feature GML stream per internal document.
        """
        pass

    @Config(ptype=str, required=True, default=None)
    def start_container(self):
        """
        Tag that starts container.
        """
        pass

    @Config(ptype=str, required=True, default=None)
    def end_container(self):
        """
        Tag that ends container.
        """
        pass

    @Config(ptype=str, required=False, default=False)
    def start_feature_tag(self):
        """
        XML tag that starts Feature.
        """
        pass

    @Config(ptype=str, required=False, default=None)
    def end_feature_tag(self):
        """
        XML tag that ends Feature.
        """
        pass
    # End attribute config meta

    def __init__(self, configdict, section):
        Input.__init__(self, configdict, section, produces=FORMAT.etree_doc)
        self.cur_feature_blob = None
        self.rowcount = 0

        # http://www.mkyong.com/regular-expressions/how-to-extract-html-links-with-regular-expression/
        self.regex_xlink_href = re.compile("\\s*(?i)xlink:href\\s*=\\s*(\"#([^\"]*\")|'#[^']*'|(#[^'\">\\s]+))")

        self.db = None
        self.xlink_db = None
        self.buffer = None
        self.feature_count = 0

        # Reusable XML parser
        self.xml_parser = etree.XMLParser(remove_blank_text=True)

    def init(self):
        pass

    def read(self, packet):
        if packet.is_end_of_stream():
            return packet

        if self.db is None:
            # First time read
            log.info("reading records from blobstore..")
            self.db = PostGIS(self.cfg.get_dict())
            self.db.connect()
            sql = self.cfg.get('sql')
            self.rowcount = self.db.execute(sql)
            self.cur = self.db.cursor
            log.info("Read records rowcount=%d" % self.rowcount)

            # Init separate connection to fetch objects referenced by xlink:href
            self.xlink_db = PostGIS(self.cfg.get_dict())
            self.xlink_db.connect()

        # Query active
        while self.cur is not None:
            if self.buffer is None:
                self.buffer = self.init_buf()
                self.buffer.write(self.start_container)

            # Get next blob record
            record = self.cur.fetchone()

            # End of all records
            if record is None:
                # End of records: start closing
                self.buffer.write(self.end_container)
                self.cur = None
                self.db.commit()

                # Only create doc if there are features in the buffer
                if self.feature_count > 0:
                    # ......... remainder of this method omitted .........
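The xlink:href regular expression compiled in __init__ above can be exercised on its own. Note that an inline (?i) flag in the middle of a pattern is rejected on Python 3.11+, so this sketch passes re.IGNORECASE explicitly; the sample element is made up for illustration.

# Standalone check of the xlink:href pattern used above.
import re

regex_xlink_href = re.compile(
    "\\s*xlink:href\\s*=\\s*(\"#([^\"]*\")|'#[^']*'|(#[^'\">\\s]+))",
    re.IGNORECASE)

sample = '<app:memberRef xlink:href="#GML_OBJ_42"/>'
match = regex_xlink_href.search(sample)
print(match.group(1))   # "#GML_OBJ_42" -> local reference to another blob by gml_id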
Example 5: PostgresInsertOutput
# Required import: from stetl.postgis import PostGIS
# Or import the method directly: from stetl.postgis.PostGIS import commit
class PostgresInsertOutput(PostgresDbOutput):
    """
    Output by inserting single record into Postgres database.
    Input is a record (Python dict structure) or a Python list of dicts (records).
    Creates an INSERT for Postgres to insert each single record.

    consumes=FORMAT.record
    """

    def __init__(self, configdict, section, consumes=FORMAT.record):
        DbOutput.__init__(self, configdict, section, consumes=[FORMAT.record_array, FORMAT.record])
        self.query = None
        self.db = None
        self.key = self.cfg.get('key')

    def init(self):
        # Connect only once to DB
        log.info('Init: connect to DB')
        self.db = PostGIS(self.cfg.get_dict())
        self.db.connect()

    def exit(self):
        # Disconnect from DB when done
        log.info('Exit: disconnect from DB')
        self.db.disconnect()

    def create_query(self, record):
        # We assume that all records do the same INSERT key/values
        # See http://grokbase.com/t/postgresql/psycopg/12735bvkmv/insert-into-with-a-dictionary-or-generally-with-a-variable-number-of-columns
        # e.g. INSERT INTO lml_files ("file_name", "file_data") VALUES (%s,%s)
        query = "INSERT INTO %s (%s) VALUES (%s)" % (self.cfg.get('table'), ",".join(['%s' % k for k in record]), ",".join(["%s", ] * len(record.keys())))
        log.info('query is %s', query)
        return query

    def write(self, packet):
        # Deal with empty or zero-length data structures (list or dict)
        if packet.data is None or len(packet.data) == 0:
            return packet

        # ASSERT: record data present
        # record is Python dict (single record) or list of Python dict (multiple records)
        record = packet.data

        # Generate INSERT query template once
        first_record = record
        if type(record) is list and len(record) > 0:
            first_record = record[0]

        # Create query once
        if self.query is None:
            self.query = self.create_query(first_record)

        # Check if record is single (dict) or array (list of dict)
        if type(record) is dict:
            # Do insert with values from the single record
            self.db.execute(self.query, record.values())
            self.db.commit(close=False)

            # log.info('committed record key=%s' % record[self.key])
        elif type(record) is list:
            # Multiple records in list
            for rec in record:
                # Do insert with values from the record
                self.db.execute(self.query, rec.values())
                self.db.commit(close=False)
            log.info('committed %d records' % len(record))

        return packet
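As a variation on the list branch above, the inserts for a record array could also be committed once per batch instead of once per record. This is a hedged sketch, not the class's actual behaviour; the table, column names and connection parameters are assumptions, and on Python 3 it is safest to pass list(rec.values()) rather than the raw dict view.

# Hedged variation: batch the inserts and commit once at the end.
from stetl.postgis import PostGIS

config = {'host': 'localhost', 'database': 'gis', 'user': 'postgres', 'password': 'postgres'}
records = [{'file_name': 'a.xml', 'file_data': '<a/>'},
           {'file_name': 'b.xml', 'file_data': '<b/>'}]
query = "INSERT INTO lml_files (file_name, file_data) VALUES (%s, %s)"

db = PostGIS(config)
db.connect()
for rec in records:
    db.execute(query, list(rec.values()))   # list() keeps Python 3 dict views usable as parameters
db.commit(close=False)                      # one commit for the whole batch
db.disconnect()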