This article collects typical code examples of the Python method msg_logger.MSGLogger.log. If you are unsure how to use MSGLogger.log, or what it is good for, the curated examples below may help. You can also explore other usage examples of its containing class, msg_logger.MSGLogger.
The following 15 code examples of MSGLogger.log are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
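All of the examples below exercise the same small interface: MSGLogger(name) or MSGLogger(name, level) to construct, and log(message) or log(message, level) to write, with levels passed as strings such as 'debug', 'info', or 'ERROR'. A hypothetical stand-in, inferred from that usage rather than taken from the project:

import logging

class MSGLogger(object):
    """Hypothetical stand-in for msg_logger.MSGLogger, inferred from usage."""

    def __init__(self, caller, level='info'):
        self.shouldRecord = False  # Toggled by callers; see Example 7.
        self._logger = logging.getLogger(caller)
        logging.basicConfig()
        self._logger.setLevel(getattr(logging, level.upper(), logging.INFO))

    def log(self, message, level='info'):
        # Levels arrive as strings, case-insensitive ('debug', 'INFO', ...).
        getattr(self._logger, level.lower(), self._logger.info)(message)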
Example 1: MECODataAutoloader
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MECODataAutoloader(object):
    """
    Provide automated loading of MECO energy data from exports in
    gzip-compressed XML source data.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.logger = MSGLogger(__name__)
        self.configer = MSGConfiger()
        self.fileUtil = MSGFileUtil()

    def newDataExists(self):
        """
        Check the data autoload folder for the presence of new data.
        :returns: True if new data exists.
        """
        autoloadPath = self.configer.configOptionValue('MECO Autoload',
                                                       'meco_new_data_path')
        if not self.fileUtil.validDirectory(autoloadPath):
            raise Exception('InvalidDirectory', '%s' % autoloadPath)
        patterns = ['*.gz']
        matchCnt = 0
        for root, dirs, filenames in os.walk(autoloadPath):
            for pat in patterns:
                for filename in fnmatch.filter(filenames, pat):
                    print(filename)
                    matchCnt += 1
        return matchCnt > 0

    def loadNewData(self):
        """
        Load new data contained in the new data path.
        """
        autoloadPath = self.configer.configOptionValue('MECO Autoload',
                                                       'meco_new_data_path')
        command = self.configer.configOptionValue('MECO Autoload',
                                                  'data_load_command')
        os.chdir(autoloadPath)
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError as e:
            self.logger.log("An exception occurred: %s" % e, 'error')
Example 2: MSGWeatherDataDupeChecker
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGWeatherDataDupeChecker(object):
    """
    Determine if a duplicate record exists based on the tuple
    (WBAN, Date, Time, StationType).
    """

    def __init__(self, testing=False):
        """
        Constructor.
        :param testing: Flag for testing mode.
        """
        self.logger = MSGLogger(__name__, 'debug')
        self.dbUtil = MSGDBUtil()

    def duplicateExists(self, dbCursor, wban, datetime, recordType):
        """
        Check for the existence of a duplicate record.
        :param dbCursor: Database cursor.
        :param wban: WBAN identifier of the weather station.
        :param datetime: Timestamp of the record.
        :param recordType: Type of the weather record.
        :returns: True if a duplicate record exists, otherwise False.
        """
        tableName = "WeatherNOAA"
        sql = """SELECT wban, datetime, record_type FROM "%s" WHERE
              wban = '%s' AND datetime = '%s' AND record_type = '%s'""" % (
            tableName, wban, datetime, recordType)
        self.logger.log("sql=%s" % sql, 'debug')
        self.logger.log("wban=%s, datetime=%s, record_type=%s" % (
            wban, datetime, recordType), 'debug')
        self.dbUtil.executeSQL(dbCursor, sql)
        return len(dbCursor.fetchall()) > 0
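Interpolating values directly into the SQL string, as above, is fragile around quoting and open to SQL injection. A safer sketch using DB-API parameter binding (assuming a psycopg2-style cursor; the function name is hypothetical):

def duplicate_exists(db_cursor, wban, datetime, record_type):
    """Parameterized version of the duplicate check."""
    sql = ('SELECT 1 FROM "WeatherNOAA" '
           'WHERE wban = %s AND datetime = %s AND record_type = %s '
           'LIMIT 1')
    # The driver quotes and escapes each bound parameter itself.
    db_cursor.execute(sql, (wban, datetime, record_type))
    return db_cursor.fetchone() is not None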
Example 3: MSGTimeUtilTester
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGTimeUtilTester(unittest.TestCase):
    def setUp(self):
        self.logger = MSGLogger(__name__, 'debug')
        self.timeUtil = MSGTimeUtil()

    def test_concise_now(self):
        conciseNow = self.timeUtil.conciseNow()
        self.logger.log(conciseNow)
        pattern = r'\d+-\d+-\d+_\d+'
        result = re.match(pattern, conciseNow)
        self.assertTrue(result is not None,
                        "Concise now should match the regex pattern.")

    def test_split_dates(self):
        start = dt(2014, 1, 7)
        end = dt(2014, 4, 4)
        print(self.timeUtil.splitDates(start, end))
        self.assertEqual(len(self.timeUtil.splitDates(start, end)), 4,
                         'Unexpected date count.')
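As with any unittest.TestCase, the tester above can be run directly. A typical entry point, assuming the test classes live in their own module:

import unittest

if __name__ == '__main__':
    # Run all test_* methods of the TestCase classes in this module.
    unittest.main()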
Example 4: WeatherDataLoadingTester
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class WeatherDataLoadingTester(unittest.TestCase):
    def setUp(self):
        self.weatherUtil = MSGWeatherDataUtil()
        self.logger = MSGLogger(__name__, 'DEBUG')
        self.dbConnector = MSGDBConnector()
        self.cursor = self.dbConnector.conn.cursor()
        self.configer = MSGConfiger()

    def testLoadDataSinceLastLoaded(self):
        """
        Data should be loaded since the last data present in the database.
        """
        pass

    def testRetrieveDataSinceLastLoaded(self):
        """
        Data since the last loaded date is retrieved.
        """
        pass

    def testGetLastLoadedDate(self):
        myDate = self.weatherUtil.getLastDateLoaded(self.cursor).strftime(
            "%Y-%m-%d %H:%M:%S")
        pattern = r'^(\d+-\d+-\d+\s\d+:\d+:\d+)$'
        match = re.match(pattern, myDate)
        assert match and (match.group(1) == myDate), "Invalid date format."

    def testWeatherDataPattern(self):
        myPattern = self.configer.configOptionValue('Weather Data',
                                                    'weather_data_pattern')
        testString = """<A HREF="someURL">QCLCD201208.zip</A>"""
        match = re.match(myPattern, testString)
        self.logger.log("pattern = %s" % myPattern, 'info')
        if match:
            self.logger.log("match = %s" % match)
            self.logger.log("match group = %s" % match.group(1))
        else:
            self.logger.log("match not found")
        assert match and match.group(
            1) == 'QCLCD201208.zip', "Download filename was not matched."

    def testWeatherDataURL(self):
        myURL = self.configer.configOptionValue('Weather Data',
                                                'weather_data_url')
        pass
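The weather_data_pattern option comes from the configuration file and is not shown in this excerpt. A pattern that would satisfy the test above, offered purely as an assumption about the real configuration value:

import re

# Hypothetical pattern: capture a QCLCD zip filename inside an anchor tag.
weather_data_pattern = r'.*<A HREF=".*">(QCLCD\d+\.zip)</A>.*'

test_string = '<A HREF="someURL">QCLCD201208.zip</A>'
match = re.match(weather_data_pattern, test_string)
assert match and match.group(1) == 'QCLCD201208.zip'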
Example 5: Zhang
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
"""
Check for new MECO data and load it if it is present.
This script is intended to be run automatically.
"""
__author__ = 'Daniel Zhang (張道博)'
__copyright__ = 'Copyright (c) 2013, University of Hawaii Smart Energy Project'
__license__ = 'https://raw.github' \
              '.com/Hawaii-Smart-Energy-Project/Maui-Smart-Grid/master/BSD' \
              '-LICENSE.txt'

from msg_logger import MSGLogger
from meco_data_autoloader import MECODataAutoloader

SUPPRESS_OUTPUT_FOR_NO_DATA = True

logger = MSGLogger(__name__)
autoloader = MECODataAutoloader()

if autoloader.newDataExists():
    logger.log('Loading new data.')
    autoloader.loadNewData()
    logger.log('Archiving loaded data.')
    autoloader.archiveLoadedData()
else:
    if not SUPPRESS_OUTPUT_FOR_NO_DATA:
        logger.log('No new data was found.')
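archiveLoadedData() is called here but does not appear in Example 1. A rough sketch of what such a method might do, inferred from its name alone; the meco_archive_path option is hypothetical:

import fnmatch
import os
import shutil

def archiveLoadedData(self):
    """Move loaded exports out of the autoload folder (hypothetical)."""
    autoloadPath = self.configer.configOptionValue('MECO Autoload',
                                                   'meco_new_data_path')
    # Hypothetical option name; the real project may use something else.
    archivePath = self.configer.configOptionValue('MECO Autoload',
                                                  'meco_archive_path')
    for root, dirs, filenames in os.walk(autoloadPath):
        for filename in fnmatch.filter(filenames, '*.gz'):
            shutil.move(os.path.join(root, filename), archivePath)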
Example 6: MSGDBExporterTester
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGDBExporterTester(unittest.TestCase):
    """
    Unit tests for the MSG Cloud Exporter.
    """

    def setUp(self):
        self.logger = MSGLogger(__name__, 'DEBUG')
        self.configer = MSGConfiger()
        self.exporter = MSGDBExporter()
        self.testDir = 'db_exporter_test'
        self.uncompressedTestFilename = 'meco_v3_test_data.sql'
        self.compressedTestFilename = 'meco_v3_test_data.sql.gz'
        self.exportTestDataPath = self.configer.configOptionValue(
            'Testing', 'export_test_data_path')
        self.fileUtil = MSGFileUtil()
        self.fileChunks = []
        self.testDataFileID = ''
        self.pyUtil = MSGPythonUtil()

        conn = None
        try:
            conn = MSGDBConnector().connectDB()
        except Exception as detail:
            self.logger.log("Exception occurred: {}".format(detail), 'error')
            exit(-1)
        self.logger.log("conn = {}".format(conn), 'debug')
        self.assertIsNotNone(conn)

        # Create a temporary working directory.
        try:
            os.mkdir(self.testDir)
        except OSError as detail:
            self.logger.log(
                'Exception during creation of temp directory: %s' % detail,
                'ERROR')

    def upload_test_data_to_cloud(self):
        """
        Provide an upload of test data that can be used in other tests.
        Side effect: Store the file ID as an ivar.
        """
        self.logger.log("Uploading test data for caller: {}".format(
            self.pyUtil.caller_name()))
        filePath = "{}/{}".format(self.exportTestDataPath,
                                  self.compressedTestFilename)
        self.logger.log('Uploading {}.'.format(filePath), 'info')
        uploadResult = self.exporter.uploadFileToCloudStorage(filePath)
        self.logger.log('upload result: {}'.format(uploadResult))
        self.testDataFileID = self.exporter.fileIDForFileName(
            self.compressedTestFilename)
        self.logger.log("Test file ID is {}.".format(self.testDataFileID))

    def test_sending_fcphase_part_0(self):
        """
        /home/daniel/msg-db-dumps/2014-05-14_141223_fcphase3.sql.gz.0
        """
        filesToUpload = [
            '/home/daniel/msg-db-dumps/2014-05-14_141223_fcphase3.sql.gz.0',
            '/home/daniel/msg-db-dumps/2014-05-14_141223_fcphase3.sql.gz.1',
            '/home/daniel/msg-db-dumps/2014-05-14_141223_fcphase3.sql.gz.2',
            '/home/daniel/msg-db-dumps/2014-05-14_141223_fcphase3.sql.gz.3']
        for f in filesToUpload:
            self.exporter.uploadFileToCloudStorage(fullPath=f,
                                                   testing=False)

    def testListRemoteFiles(self):
        """
        Test listing of remote files.
        """
        self.logger.log('Testing listing of remote files.', 'INFO')
        title = ''
        id = ''
        for item in self.exporter.cloudFiles['items']:
            title = item['title']
            id = item['id']
            self.assertIsNot(title, '')
            self.assertIsNot(id, '')

    def testDownloadURLList(self):
        """
        Test obtaining a list of downloadable URLs.
        """
        self.logger.log('Testing listing of downloadable files.', 'INFO')
        title = ''
        id = ''
        url = ''
        for item in self.exporter.cloudFiles['items']:
            title = item['title']
#......... remaining code omitted .........
Example 7: MSGLogger
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
def processCommandLineArguments():
    """
    Define and parse the command line arguments.
    """
    global COMMAND_LINE_ARGS
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--dbname', help='Database file to be uploaded.')
    parser.add_argument('--fullpath',
                        help='Full path to database file to be uploaded.')
    parser.add_argument('--testing', action='store_true', default=False)
    COMMAND_LINE_ARGS = parser.parse_args()


if __name__ == '__main__':
    logger = MSGLogger(__name__, 'INFO')
    logger.log("Exporting DBs to cloud.")
    processCommandLineArguments()
    exporter = MSGDBExporter()
    notifier = MSGNotifier()
    exporter.logger.shouldRecord = True
    startTime = time.time()
    noErrors = exporter.exportDB(
        databases=exporter.configer.configOptionValue(
            'Export', 'dbs_to_export').split(','),
        toCloud=True,
        testing=COMMAND_LINE_ARGS.testing,
        numChunks=int(exporter.configer.configOptionValue(
            'Export', 'num_split_sections')),
        deleteOutdated=True)
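The script records startTime and collects noErrors, but the excerpt ends before either is used. A plausible continuation, offered as an assumption rather than the project's actual code:

runTime = time.time() - startTime
logger.log('Export completed in {:.1f} seconds; noErrors = {}'.format(
    runTime, noErrors))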
Example 8: MSGEgaugeNewDataChecker
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGEgaugeNewDataChecker(object):
    """
    Provide notification of newly loaded MSG eGauge data.
    This uses notification type MSG_EGAUGE_SERVICE.
    """

    def __init__(self):
        """
        Constructor.
        """
        print(__name__)
        self.logger = MSGLogger(__name__)
        self.connector = MSGDBConnector()
        self.dbUtil = MSGDBUtil()
        self.notifier = MSGNotifier()
        self.configer = MSGConfiger()

    def newDataCount(self):
        """
        Measure the amount of new data that is present since the last time
        new data was reported.
        """
        cursor = self.connector.conn.cursor()
        tableName = 'EgaugeEnergyAutoload'
        lastTime = self.lastReportDate('MSG_EGAUGE_SERVICE')
        if lastTime is None:
            lastTime = '1900-01-01'
        sql = """SELECT COUNT(*) FROM "%s" WHERE datetime > '%s'""" % (
            tableName, lastTime)
        success = self.dbUtil.executeSQL(cursor, sql)
        if success:
            rows = cursor.fetchall()
            if not rows[0][0]:
                return 0
            else:
                return rows[0][0]
        else:
            # @todo Raise an exception.
            return None

    def lastReportDate(self, notificationType):
        """
        Get the last time a notification was reported.
        :param notificationType: A string indicating the type of the
        notification. It is stored in the event history.
        :returns: datetime of the last report date.
        """
        cursor = self.connector.conn.cursor()
        sql = """SELECT MAX("notificationTime") FROM "%s" WHERE
              "notificationType" = '%s'""" % (
            NOTIFICATION_HISTORY_TABLE, notificationType)
        success = self.dbUtil.executeSQL(cursor, sql)
        if success:
            rows = cursor.fetchall()
            if not rows[0][0]:
                return None
            else:
                return rows[0][0]
        else:
            # @todo Raise an exception.
            return None

    def saveNotificationTime(self):
        """
        Save the notification event to the notification history.
        """
        cursor = self.connector.conn.cursor()
        sql = """INSERT INTO "%s" ("notificationType", "notificationTime")
              VALUES ('MSG_EGAUGE_SERVICE', NOW())""" % NOTIFICATION_HISTORY_TABLE
        success = self.dbUtil.executeSQL(cursor, sql)
        self.connector.conn.commit()
        if not success:
            # @todo Raise an exception.
            self.logger.log(
                'An error occurred while saving the notification time.')

    def sendNewDataNotification(self, testing=False):
        """
        Send a notification reporting on new data being available since the
        last time new data was reported.
        :param testing: Use testing mode when True.
        """
        lastReportDate = self.lastReportDate('MSG_EGAUGE_SERVICE')
#......... remaining code omitted .........
Example 9: VALUES
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
    # (Excerpt begins inside a method that records the notification time.)
    cursor = self.cursor
    sql = """INSERT INTO "{}" ("notificationType", "notificationTime")
          VALUES ('{}', NOW())""".format(NOTIFICATION_HISTORY_TABLE,
                                         NOTIFICATION_HISTORY_TYPE)
    success = self.dbUtil.executeSQL(cursor, sql)
    self.conn.commit()
    if not success:
        raise Exception('Exception while saving the notification time.')

    def aggregateNewData(self):
        """
        :return: List of dicts obtained from
        MSGDataAggregator::aggregateNewData.
        """
        result = map(self.aggregator.aggregateNewData, self.rawTypes)
        self.logger.log('result {}'.format(result))
        return result


if __name__ == '__main__':
    aggregator = NewDataAggregator()
    logger = MSGLogger(__name__)
    logger.log('Last report date {}'.format(
        aggregator.lastReportDate(NOTIFICATION_HISTORY_TYPE)))
    result = aggregator.aggregateNewData()
    aggregator.sendNewDataNotification(result=result, testing=False)
Example 10: range
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
for line in csv.reader(csvFile, delimiter=","):
    if lineCnt != 0:  # Skip the header row.
        data = line[0:len(cols)]  # Overshoot columns to get the last column.
        for i in range(0, len(cols)):
            if len(data[i]) == 0:
                data[i] = 'NULL'
            else:
                # Escape single quotes with doubled single quotes for
                # PostgreSQL.
                data[i] = data[i].replace("'", "''")
                data[i] = "'" + data[i] + "'"
        sql = """INSERT INTO "MeterLocationHistory" (%s) VALUES (%s)""" % (
            ','.join(cols), ','.join(data))
        logger.log("SQL: %s" % sql, 'debug')
        success = dbUtil.executeSQL(cur, sql)
        if not success:
            anyFailure = True
    lineCnt += 1

conn.commit()
msg = "Processed %s lines.\n" % lineCnt
sys.stderr.write(msg)
msgBody += msg

if not anyFailure:
    msg = "Finished inserting Meter Location History records.\n"
    sys.stderr.write(msg)
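The same load can be expressed with driver-side parameter binding, which removes the manual NULL and quote handling above. A sketch assuming a psycopg2 cursor and the same cols list:

import csv

def load_meter_location_history(cur, csv_file, cols):
    """Insert each CSV row using bound parameters (sketch)."""
    placeholders = ','.join(['%s'] * len(cols))
    sql = 'INSERT INTO "MeterLocationHistory" ({}) VALUES ({})'.format(
        ','.join(cols), placeholders)
    reader = csv.reader(csv_file, delimiter=',')
    next(reader)  # Skip the header row.
    for line in reader:
        # Empty strings become SQL NULLs via None.
        row = [value if value else None for value in line[0:len(cols)]]
        cur.execute(sql, row)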
Example 11: MSGConfiger
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGConfiger(object):
    """
    Supports system-specific configuration for MECO data processing.
    The site-level configuration file is located in
    ~/.msg-data-operations.cfg.

    Usage:

        configer = MSGConfiger()
    """

    def __init__(self):
        """
        Constructor.
        """
        self._config = ConfigParser.ConfigParser()
        self.logger = MSGLogger(__name__, 'INFO')

        # Define tables that will have data inserted. Data will only be
        # inserted to tables that are defined here.
        self.insertTables = (
            'MeterData', 'RegisterData', 'RegisterRead', 'Tier', 'Register',
            'IntervalReadData', 'Interval', 'Reading', 'EventData', 'Event')

        # Check permissions on the config file. Refuse to run if the
        # permissions are not set appropriately.
        configFilePath = '~/.msg-data-operations.cfg'
        if self.isMoreThanOwnerReadableAndWritable(
                os.path.expanduser(configFilePath)):
            self.logger.log(
                "Configuration file permissions are too permissive. "
                "Operation will not continue.", 'error')
            sys.exit()
        try:
            self._config.read(['site.cfg',
                               os.path.expanduser(configFilePath)])
        except Exception:
            self.logger.log("Critical error: The data in {} cannot be "
                            "accessed successfully.".format(configFilePath),
                            'ERROR')
            sys.exit(-1)

    def configOptionValue(self, section, option):
        """
        Get a configuration value from the local configuration file.
        :param section: String of section in config file.
        :param option: String of option in config file.
        :returns: The value contained in the configuration file.
        """
        try:
            configValue = self._config.get(section, option)
            if configValue == "True":
                return True
            elif configValue == "False":
                return False
            else:
                return configValue
        except Exception:
            self.logger.log(
                "Failed when getting configuration option {} in section "
                "{}.".format(option, section), 'error')
            sys.exit(-1)

    def isMoreThanOwnerReadableAndWritable(self, filePath):
        """
        Determine if a file has greater permissions than owner read/write.
        :param filePath: String for path to the file being tested.
        :returns: Boolean True if the permissions are greater than owner
        read/write, otherwise return False.
        """
        st = os.stat(filePath)
        # Permissions are too permissive if group or others can read,
        # write or execute.
        if bool(st.st_mode & stat.S_IRGRP) or bool(
                st.st_mode & stat.S_IROTH) or bool(
                st.st_mode & stat.S_IWGRP) or bool(
                st.st_mode & stat.S_IWOTH) or bool(
                st.st_mode & stat.S_IXGRP) or bool(
                st.st_mode & stat.S_IXOTH) or bool(
                st.st_mode & stat.S_IXOTH):
            return True
        else:
            return False
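The chained stat checks can be collapsed into a single mask test. An equivalent sketch:

import os
import stat

def more_than_owner_rw(file_path):
    """True if group or others have any read/write/execute permission."""
    mode = os.stat(file_path).st_mode
    # S_IRWXG and S_IRWXO combine the read/write/execute bits for
    # group and others respectively.
    return bool(mode & (stat.S_IRWXG | stat.S_IRWXO))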
Example 12: MSGDataVerifier
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGDataVerifier(object):
    """
    Perform verification procedures related to data integrity.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.logger = MSGLogger(__name__, 'DEBUG')
        self.cursor = MSGDBConnector().connectDB().cursor()
        self.dbUtil = MSGDBUtil()

    def mecoReadingsDupeCount(self):
        """
        Generate counts of MECO dupe readings.
        """
        dupes = 0
        startDate = lambda y, m: '%d-%02d-%02d' % (y, m, 1)
        endDate = lambda y, m: '%d-%02d-%02d' % (
            y, m, calendar.monthrange(y, m)[1])

        for y in YEARS:
            startDates = [startDate(y, m) for m in
                          map(lambda x: x + 1, range(12))]
            endDates = [endDate(y, m) for m in map(lambda x: x + 1, range(12))]
            for start in startDates:
                cnt = self.__mecoReadingsDupeCount(
                    start, endDates[startDates.index(start)])
                self.logger.log('start: %s, dupe cnt: %s' % (start, cnt),
                                'INFO')
                dupes += cnt
        return dupes

    def __mecoReadingsDupeCount(self, startDate, endDate):
        """
        :param startDate: Start of the month being checked.
        :param endDate: End of the month being checked.
        :returns: DB row count.
        """
        self.dbUtil.executeSQL(self.cursor, """SELECT "Interval".end_time,
            "MeterData".meter_name,
            "Reading".channel
        FROM "MeterData"
        INNER JOIN "IntervalReadData" ON "MeterData"
        .meter_data_id = "IntervalReadData".meter_data_id
        INNER JOIN "Interval" ON "IntervalReadData"
        .interval_read_data_id = "Interval".interval_read_data_id
        INNER JOIN "Reading" ON "Interval".interval_id = "Reading"
        .interval_id
        WHERE "Interval".end_time BETWEEN '%s' AND '%s'
        GROUP BY "MeterData".meter_name,
            "Interval".end_time,
            "Reading".channel
        HAVING (COUNT(*) > 1)""" % (startDate, endDate))
        return len(self.cursor.fetchall())

    def egaugeAggregationCount(self):
        """
        There should not be more than 96 15-min interval endpoints within a
        single calendar day for a given sub ID.
        """
        pass
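YEARS is a module-level constant that does not appear in this excerpt. A driver sketch under an assumed definition:

# Assumed value; the real constant is defined elsewhere in the project.
YEARS = (2012, 2013, 2014)

verifier = MSGDataVerifier()
print('Total MECO dupe readings: %s' % verifier.mecoReadingsDupeCount())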
Example 13: MSGTimeUtil
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGTimeUtil(object):
    """
    Utilities for working with time.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.logger = MSGLogger(__name__, 'debug')

    def reportOfDays(self, datetimes=None):
        """
        Return a report of the days processed given a set of days.
        :param datetimes: A set of datetimes.
        :returns: Report of processing as a string.
        """
        # @todo Verify datetimes is a Set.
        # self.logger.log("datetimes = %s" % datetimes, 'debug')
        if datetimes is None:
            return "No days processed."
        myDates = set()
        for day in datetimes:
            self.logger.log('Processing day %s.' % day)
            myDates.add(day.date())
        datetimeList = list(myDates)
        datetimeList.sort()
        countOfDays = len(datetimeList)
        firstDay = datetimeList[0]
        lastDay = datetimeList[-1]
        if countOfDays == 1:
            return "Processed 1 day with date %s." % firstDay
        else:
            return "Processed %s days between %s and %s, inclusive." % (
                countOfDays, firstDay, lastDay)

    def conciseNow(self):
        """
        Return the current date and time in a concise format.
        """
        return dt.now().strftime('%Y-%m-%d_%H%M%S')

    def splitStringDates(self, startDate='', endDate=''):
        """
        Break down two dates into a list containing the start and end dates
        for each month within the range.
        :param startDate: string
        :param endDate: string
        :return: List of tuples.
        """
        # self.logger.log('start,end: %s,%s' % (startDate, endDate))
        myDatetime = lambda x: dt.strptime(x, '%Y-%m-%d')
        firstDay = lambda x: dt.strptime(x.strftime('%Y-%m-01'), '%Y-%m-%d')
        startDates = [firstDay(d) for d in
                      rrule.rrule(rrule.MONTHLY,
                                  dtstart=myDatetime(startDate),
                                  until=myDatetime(endDate))]
        startDates[0] = myDatetime(startDate)
        lastDay = lambda x: dt.strptime('%d-%d-%d' % (
            x.year, x.month, calendar.monthrange(x.year, x.month)[1]),
            '%Y-%m-%d')
        endDates = [lastDay(d) for d in startDates]
        endDates[-1] = myDatetime(endDate)
        assert len(startDates) == len(
            endDates), 'Mismatch of start and end dates.'
        return list(zip(startDates, endDates))

    def splitDates(self, start=None, end=None):
        """
        Break down two dates into a list containing the start and end dates
        for each month within the range.
        :param start: datetime
        :param end: datetime
        :return: List of tuples.
        """
        self.logger.log('start {}, end {}'.format(start, end), 'debug')
        # First day of the month.
        firstDay = lambda x: dt.strptime(x.strftime('%Y-%m-01'), '%Y-%m-%d')
        startDates = [firstDay(d) for d in
                      rrule.rrule(rrule.MONTHLY, dtstart=firstDay(start),
                                  until=end)]
        # @todo Add an assert verifying that start dates are sorted.
        startDates[0] = start
#......... remaining code omitted .........
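splitDates pairs each month in the range with start and end bounds; the omitted tail presumably builds month-end dates the same way splitStringDates does. A usage sketch matching the assertion in Example 3:

from datetime import datetime as dt

timeUtil = MSGTimeUtil()
pairs = timeUtil.splitDates(start=dt(2014, 1, 7), end=dt(2014, 4, 4))
assert len(pairs) == 4  # One (start, end) tuple per calendar month.
# Presumed pairs: 01-07..01-31, 02-01..02-28, 03-01..03-31, 04-01..04-04.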
Example 14: Inserter
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class Inserter(object):
    """
    Perform insertion of data contained in a single file to the MECO database
    specified in the configuration file.
    """

    def __init__(self, testing=False):
        """
        Constructor.
        :param testing: Flag indicating if testing mode is on.
        """
        self.logger = MSGLogger(__name__)
        self.parser = MECOXMLParser(testing)
        self.configer = MSGConfiger()

    def insertData(self, filePath, testing=False, jobID=''):
        """
        Insert data from a single file to the database.
        :param filePath: Full path of a data file.
        :param testing: Boolean flag indicating if the testing database
        should be used.
        :param jobID: An ID used to distinguish multiprocessing jobs.
        :returns: String containing a concise log of activity.
        """
        parseLog = ''
        print("Processing file %s." % filePath)
        i = Inserter(testing)
        if i.configer.configOptionValue("Debugging", "debug"):
            print("Debugging is on.")

        if testing:
            parseMsg = "\nInserting data to database %s.\n" % \
                i.configer.configOptionValue("Database", "testing_db_name")
        else:
            parseMsg = "\nInserting data to database %s.\n" % \
                i.configer.configOptionValue("Database", "db_name")
        sys.stderr.write(parseMsg)
        parseLog += parseMsg

        fileObject = None
        # Open the file and process it.
        if re.search(r'.*\.xml$', filePath):
            fileObject = open(filePath, "rb")
        elif re.search(r'.*\.xml\.gz$', filePath):
            fileObject = gzip.open(filePath, "rb")
        else:
            print("Error: %s is not an XML file." % filePath)
            return parseLog

        try:
            with FileLock(filePath, timeout=2) as lock:
                self.logger.log("Locking %s " % filePath)
                i.parser.filename = filePath
                # Obtain the log of the parsing.
                parseLog += i.parser.parseXML(fileObject, True, jobID=jobID)
                fileObject.close()
        except TypeError:
            self.logger.log('Type error occurred', 'error')

        return parseLog
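A hypothetical call sketch; the path and job ID are illustrative only:

inserter = Inserter(testing=True)
log = inserter.insertData('/path/to/meter_data.xml.gz', testing=True,
                          jobID='job-1')
print(log)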
Example 15: MSGNOAAWeatherDataParser
# Required import: from msg_logger import MSGLogger [as alias]
# Or: from msg_logger.MSGLogger import log [as alias]
class MSGNOAAWeatherDataParser(object):
    """
    Given a file object containing NOAA weather data, return a data structure
    containing the data.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.logger = MSGLogger(__name__, 'debug')
        # Note: 'datetime' appears twice because the source date and time
        # columns are merged into a single datetime value during parsing.
        self.cols = ["wban", "datetime", "datetime", "station_type",
                     "sky_condition", "sky_condition_flag", "visibility",
                     "visibility_flag", "weather_type", "weather_type_flag",
                     "dry_bulb_farenheit", "dry_bulb_farenheit_flag",
                     "dry_bulb_celsius", "dry_bulb_celsius_flag",
                     "wet_bulb_farenheit", "wet_bulb_farenheit_flag",
                     "wet_bulb_celsius", "wet_bulb_celsius_flag",
                     "dew_point_farenheit", "dew_point_farenheit_flag",
                     "dew_point_celsius", "dew_point_celsius_flag",
                     "relative_humidity", "relative_humidity_flag",
                     "wind_speed", "wind_speed_flag", "wind_direction",
                     "wind_direction_flag", "value_for_wind_character",
                     "value_for_wind_character_flag", "station_pressure",
                     "station_pressure_flag", "pressure_tendency",
                     "pressure_tendency_flag", "pressure_change",
                     "pressure_change_flag", "sea_level_pressure",
                     "sea_level_pressure_flag", "record_type",
                     "record_type_flag", "hourly_precip",
                     "hourly_precip_flag", "altimeter", "altimeter_flag"]

    def parseWeatherData(self, fileObject, stationIDs):
        """
        :param fileObject: File object containing weather data.
        :param stationIDs: List of station IDs to be parsed.
        :returns: List of dictionaries containing parsed weather data.
        """
        self.logger.log('Data column count = %s' % len(self.cols), 'debug')
        rowNum = 0
        lastCol = 0
        reader = csv.reader(fileObject)
        self.data = []
        for row in reader:
            rowDict = {}
            newDate = ''
            # Handle the header row and determine the last column.
            if rowNum == 0:
                colNum = 0
                for col in row:
                    colNum += 1
                lastCol = colNum
            else:
                colNum = 0
                for col in row:
                    if colNum == 0:
                        if self.stationShouldBeProcessed(col, stationIDs):
                            rowDict['wban'] = col
                        else:
                            # Skip station IDs not marked for processing.
                            break
                    elif colNum == 1:  # Date column.
                        newDate = '%s-%s-%s' % (col[0:4], col[4:6], col[6:8])
                    elif colNum == 2:  # Time column.
                        time = col.zfill(4)
                        rowDict['datetime'] = '%s %s:%s' % (
                            newDate, time[0:2], time[2:4])
                        newDate = ''
                    else:
                        try:
                            rowDict[self.cols[colNum]] = '%s' % col
                        except IndexError as e:
                            print("Exception during first assignment: %s, "
                                  "Index = %s" % (e, colNum))
                    colNum += 1
                    if colNum == lastCol:
                        # Replace empty or whitespace-only values with NULL.
                        for i in range(0, lastCol - 1):
                            try:
                                if len(rowDict[self.cols[i]]) == 0 or len(
                                        re.sub(r"\s+", "",
                                               rowDict[self.cols[i]])) == 0:
                                    rowDict[self.cols[i]] = 'NULL'
                            except IndexError as e:
                                print("Exception during second assignment: "
#......... remaining code omitted .........
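A hypothetical usage sketch; the filename and station ID are illustrative, and it assumes the omitted tail of parseWeatherData returns the accumulated self.data list:

parser = MSGNOAAWeatherDataParser()
with open('201208hourly.txt') as f:  # Hypothetical QCLCD hourly file.
    data = parser.parseWeatherData(f, stationIDs=['22521'])
for record in data:
    print(record['wban'], record['datetime'])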