This page collects typical usage examples of the Python method msg_db_connector.MSGDBConnector.cursor. If you have been wondering exactly what MSGDBConnector.cursor does and how to use it, the selected code examples below may help. You can also look further into the usage of its containing class, msg_db_connector.MSGDBConnector.
Five code examples of the MSGDBConnector.cursor method are shown below, ordered by popularity by default.
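All five examples share the same basic pattern: open a connection with MSGDBConnector().connectDB(), take a cursor from that connection, and pass the cursor to MSGDBUtil.executeSQL() before committing on the connection. The minimal sketch below only illustrates that pattern; the executeSQL signature and the "ExportHistory" query mirror the examples on this page, while the MSGDBUtil import path and the surrounding script structure are assumptions rather than code from the MSG project.

from msg_db_connector import MSGDBConnector
# Assumption: MSGDBUtil is importable from a sibling module named msg_db_util.
from msg_db_util import MSGDBUtil

# Open a connection and obtain a cursor, as every example below does.
conn = MSGDBConnector().connectDB()
cursor = conn.cursor()
dbUtil = MSGDBUtil()

# Run a query through the shared utility; the SQL mirrors Example 1.
if dbUtil.executeSQL(cursor, 'select * from "ExportHistory"',
                     exitOnFail = False):
    rows = cursor.fetchall()
    print "{} rows in ExportHistory".format(len(rows))

conn.commit()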
Example 1: test_log_successful_export
# Required import: from msg_db_connector import MSGDBConnector [as alias]
# Or: from msg_db_connector.MSGDBConnector import cursor [as alias]
def test_log_successful_export(self):
    """
    Test logging of export results to the export history table.
    """
    # @REVIEWED
    self.assertTrue(self.exporter.logSuccessfulExport(name = 'test_export',
                                                      url = 'http://test_url',
                                                      datetime = 0,
                                                      size = 100))
    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()
    self.assertTrue(
        dbUtil.executeSQL(cursor, 'select * from "ExportHistory" where '
                                  'timestamp = to_timestamp(0)'))
    self.assertEqual(len(cursor.fetchall()), 1,
                     "There should only be one result row.")
    self.assertTrue(
        dbUtil.executeSQL(cursor, 'delete from "ExportHistory" where '
                                  'timestamp = to_timestamp(0)'))
    conn.commit()
Example 2: MECODBReader
# Required import: from msg_db_connector import MSGDBConnector [as alias]
# Or: from msg_db_connector.MSGDBConnector import cursor [as alias]
class MECODBReader(object):
    """
    Read records from a database.
    """

    def __init__(self, testing = False):
        """
        Constructor.

        :param testing: True if in testing mode.
        """

        self.connector = MSGDBConnector()
        self.conn = MSGDBConnector(testing).connectDB()
        self.dbUtil = MSGDBUtil()
        self.dbName = self.dbUtil.getDBName(self.connector.dictCur)

    def selectRecord(self, conn, table, keyName, keyValue):
        """
        Read a record in the database given a table name, primary key name,
        and value for the key.

        :param conn: DB connection
        :param table: DB table name
        :param keyName: DB column name for primary key
        :param keyValue: Value to be matched
        :returns: Row containing record data.
        """

        print "selectRecord:"
        sql = """SELECT * FROM "%s" WHERE %s = %s""" % (
            table, keyName, keyValue)
        dcur = conn.cursor(cursor_factory = psycopg2.extras.DictCursor)
        self.dbUtil.executeSQL(dcur, sql)
        row = dcur.fetchone()
        return row

    def readingAndMeterCounts(self):
        """
        Retrieve the reading and meter counts.

        :returns: Multiple lists containing the retrieved data.
        """

        sql = """SELECT "Day", "Reading Count",
        "Meter Count" FROM count_of_readings_and_meters_by_day"""
        dcur = self.conn.cursor(cursor_factory = psycopg2.extras.DictCursor)
        self.dbUtil.executeSQL(dcur, sql)
        rows = dcur.fetchall()

        dates = []
        meterCounts = []
        readingCounts = []

        for row in rows:
            dates.append(row[0])
            readingCounts.append(row[1] / row[2])
            meterCounts.append(row[2])

        return dates, readingCounts, meterCounts
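A brief usage sketch for the reader above. The constructor call and readingAndMeterCounts() follow directly from the class definition; the table name, key column, and key value passed to selectRecord() are hypothetical placeholders, not names taken from the MSG schema.

reader = MECODBReader(testing = True)

# Fetch one record by primary key (table and key names below are hypothetical).
row = reader.selectRecord(reader.conn, 'MeterData', 'meter_data_id', 123)

# Collect per-day reading and meter counts, e.g. for plotting.
dates, readingCounts, meterCounts = reader.readingAndMeterCounts()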
Example 3: countOfDBExports
# Required import: from msg_db_connector import MSGDBConnector [as alias]
# Or: from msg_db_connector.MSGDBConnector import cursor [as alias]
def countOfDBExports(self, since = None):
    """
    :param since: datetime indicating last export datetime.
    :return: Int of count of exports.
    """

    myDatetime = lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M')
    if not since:
        since = myDatetime('1900-01-01 00:00')
    self.logger.log(since.strftime('%Y-%m-%d %H:%M'), 'DEBUG')

    sql = 'SELECT COUNT("public"."ExportHistory"."timestamp") FROM ' \
          '"public"."ExportHistory" WHERE "timestamp" > \'{}\''.format(
        since.strftime('%Y-%m-%d %H:%M'))

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()
    rows = None
    if dbUtil.executeSQL(cursor, sql, exitOnFail = False):
        rows = cursor.fetchall()
    assert len(rows) == 1, 'Invalid return value.'
    return rows[0][0]
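A possible call site for the method above, assuming it is defined on the same exporter object exercised in Example 1; the cut-off date is chosen only for illustration.

# Count exports logged after an arbitrary cut-off date.
count = exporter.countOfDBExports(since = datetime.datetime(2014, 1, 1))

# With no argument, the method counts everything since 1900-01-01 00:00.
total = exporter.countOfDBExports()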
Example 4: logSuccessfulExport
# Required import: from msg_db_connector import MSGDBConnector [as alias]
# Or: from msg_db_connector.MSGDBConnector import cursor [as alias]
def logSuccessfulExport(self, name = '', url = '', datetime = 0, size = 0):
    """
    When an export has been successful, log information about the export
    to the database.

    The items to log include:

    * filename
    * URL
    * timestamp
    * filesize

    :param name: String
    :param url: String
    :param datetime:
    :param size: Int
    :return: True if no errors occurred, else False.
    """

    def exportHistoryColumns():
        return ['name', 'url', 'timestamp', 'size']

    timestamp = lambda datetime: 'to_timestamp(0)' if datetime == 0 \
        else "timestamp '{}'".format(datetime)

    sql = 'INSERT INTO "{0}" ({1}) VALUES ({2}, {3}, {4}, {5})'.format(
        self.configer.configOptionValue('Export', 'export_history_table'),
        ','.join(exportHistoryColumns()), "'" + name + "'",
        "'" + url + "'", timestamp(datetime), size)

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()
    result = dbUtil.executeSQL(cursor, sql, exitOnFail = False)
    conn.commit()
    return result
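As a worked example of the string formatting above, and assuming the configured export_history_table is "ExportHistory" as in Examples 1 and 3, the call made in Example 1 (name = 'test_export', url = 'http://test_url', datetime = 0, size = 100) assembles the following statement:

    INSERT INTO "ExportHistory" (name,url,timestamp,size) VALUES ('test_export', 'http://test_url', to_timestamp(0), 100)

A nonzero datetime would instead be rendered by the timestamp lambda as timestamp '<value>'.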
Example 5: MSGDataAggregator
# Required import: from msg_db_connector import MSGDBConnector [as alias]
# Or: from msg_db_connector.MSGDBConnector import cursor [as alias]
class MSGDataAggregator(object):
    """
    Use for continuous data aggregation of diverse data types relevant to the
    Maui Smart Grid project.

    Four data types are supported:

    1. Irradiance
    2. Temperature/Humidity (weather)
    3. Circuit
    4. eGauge

    The general data form conforms to

    1. timestamp, subkey_id, val1, val2, val3, ...
    2. timestamp, val1, val2, val3, ...

    Case (2) is handled within the same space as (1) by testing for the
    existence of subkeys.

    Current aggregation consists of averaging over **15-min intervals**.

    Aggregation is performed in-memory and saved to the DB. The time range is
    delimited by start date and end date where the values are included in the
    range. The timestamps for aggregation intervals are the last timestamp in a
    respective series.

    * Aggregation subkeys are values such as eGauge IDs or circuit numbers.

    Aggregation is being implemented externally for performance and flexibility
    advantages over alternative approaches such as creating a view. It may be
    rolled into an internal function at a future time if that proves to be
    beneficial.

    Usage:

        from msg_data_aggregator import MSGDataAggregator
        aggregator = MSGDataAggregator()

    API:

        aggregateAllData(dataType = dataType)
        aggregateNewData(dataType = dataType)
    """

    def __init__(self, exitOnError = True, commitOnEveryInsert = False,
                 testing = False):
        """
        Constructor.

        :param testing: if True, the testing DB will be connected instead of
        the production DB.
        """

        self.logger = SEKLogger(__name__, "info")
        self.configer = MSGConfiger()
        self.conn = MSGDBConnector().connectDB()
        self.cursor = self.conn.cursor()
        self.dbUtil = MSGDBUtil()
        self.notifier = MSGNotifier()
        self.mathUtil = MSGMathUtil()
        self.timeUtil = MSGTimeUtil()
        self.nextMinuteCrossing = {}
        self.nextMinuteCrossingWithoutSubkeys = None
        self.exitOnError = exitOnError
        self.commitOnEveryInsert = commitOnEveryInsert

        section = "Aggregation"
        tableList = [
            "irradiance",
            "agg_irradiance",
            "weather",
            "agg_weather",
            "circuit",
            "agg_circuit",
            "egauge",
            "agg_egauge",
        ]
        self.dataParams = {
            "weather": ("agg_weather", "timestamp", ""),
            "egauge": ("agg_egauge", "datetime", "egauge_id"),
            "circuit": ("agg_circuit", "timestamp", "circuit"),
            "irradiance": ("agg_irradiance", "timestamp", "sensor_id"),
        }
        self.columns = {}

        # tables[dataType] gives the table name for dataType.
        self.tables = {
            t: self.configer.configOptionValue(section, "{}_table".format(t))
            for t in tableList}

        for t in self.tables.keys():
            self.logger.log("t:{}".format(t), "DEBUG")
            try:
                self.columns[t] = self.dbUtil.columnsString(self.cursor,
                                                            self.tables[t])
            except TypeError as error:
                self.logger.log(
                    "Ignoring missing table: Error is {}.".format(error),
                    "error")

    def existingIntervals(self, aggDataType = "", timeColumnName = ""):
        """
        Retrieve the existing aggregation intervals for the given data type.
# ... the remainder of the code is omitted here ...
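Following the Usage and API notes in the class docstring, a minimal driving script might look like the sketch below. Only the constructor and the two method names come from the docstring; the comments describe the presumed intent of each call based on its name, and the dataType values are keys of self.dataParams shown above.

from msg_data_aggregator import MSGDataAggregator

aggregator = MSGDataAggregator(exitOnError = False)

# Presumably aggregates only data added since the last aggregation run.
aggregator.aggregateNewData(dataType = 'egauge')

# Presumably recomputes aggregates over the full history for a data type.
aggregator.aggregateAllData(dataType = 'irradiance')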