This article collects typical usage examples of the Python method WhfLog.info. If you are wondering what WhfLog.info does, how to call it, or simply want to see it in context, the curated code samples here may help; you can also look further into the WhfLog module the method belongs to.
The sections below show 14 code examples of WhfLog.info, ordered by popularity by default.
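Before the examples, here is a minimal, hypothetical stand-in for the WhfLog module. It assumes WhfLog is a thin wrapper around Python's standard logging module (the real implementation in the forcing engine may differ); the point is only to show the calling convention used throughout the examples: printf-style format strings with the arguments passed separately, e.g. WhfLog.info("DONE LAYERING file=%s", path).
# whf_log_sketch.py -- a minimal, hypothetical stand-in for WhfLog
# (assumption: the real module wraps Python's logging; everything here is illustrative)
import logging

_logger = logging.getLogger("WhfLog")
logging.basicConfig(level=logging.DEBUG,
                    format="%(asctime)s %(levelname)s %(message)s")

def info(msg, *args):
    # printf-style, mirroring WhfLog.info("file=%s", fname)
    _logger.info(msg, *args)

def debug(msg, *args):
    _logger.debug(msg, *args)

def error(msg, *args):
    _logger.error(msg, *args)

# Usage mirroring the examples below:
#   import WhfLog
#   WhfLog.info("REGRIDDING %s DATA, file=%s", "HRRR", "some_file.grb2")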
Example 1: createStateFile
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def createStateFile(parms, fileType, realtime):
""" Called if there is no state file, look at data dirs and create state
in realtime, in non-realtime create an empty state. Write to file.
Parameters
----------
parms: Parms
Parameter settings
fileType: str
'HRRR', ...
realtime: boolean
True if realtime, False for archive mode
Returns
-------
none
Writes out the state file after creating it
"""
WhfLog.info("Initializing")
state = State("")
if (realtime):
# query each directory and get newest model run file for each, then
# get all for that and previous issue time, this becomes state that
# is not re-processed, we only look for new stuff
data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
data.setNewestFiles(parms._hoursBack)
for f in data._content:
f.debugPrint("Newest files: " + fileType)
state.initialize(data)
# write out file (at least try to)
state.write(parms._stateFile, fileType)
Example 2: main
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def main(argv):
# User must pass the config file into the main driver.
configFile = argv[0]
if not os.path.exists(configFile):
print('ERROR forcing engine config file not found.')
return 1
# read in fixed main params
parms = parmRead(configFile)
# query each directory to get newest thing, and update overall newest
#WhfLog.debug("Looking in %s and %s", parms._hrrrDir, parms._rapDir)
newestT = df.newestIssueTime(parms._hrrrDir)
newestT2 = df.newestIssueTime(parms._rapDir)
if (not newestT) and (not newestT2):
WhfLog.debug("NO INPUT DATA available")
return 0
if (newestT2 > newestT):
newestT = newestT2
#WhfLog.debug("Newest issue time = %s", newestT)
# if there is not a state file, create one now using newest
if (not os.path.exists(parms._stateFile)):
state = State()
WhfLog.info("Initializing")
state.initialSetup(parms)
state.initialize(parms, newestT)
state.write(parms._stateFile)
# Normal processing situation
#WhfLog.debug("Look for Layering....")
# read in state
state2 = State()
state2.initFromStateFile(parms._stateFile)
if state2.isEmpty():
# error return here
return 0
# check for new issue time
if (state2.isNewModelIssueTime(newestT)):
WhfLog.info("Re-Initializing state, new model issue time %s", newestT)
state2.initialize(parms, newestT)
# update availability
state2.setCurrentModelAvailability(parms)
# layer if appropriate
state2.layerIfReady(parms, configFile)
# write out final state
state2.write(parms._stateFile)
return 0
Example 3: _layer
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def _layer(self, parms, configFile):
""" Perform layering
NOTE: here is where return status will be added and used
Parameters
----------
parms : Parms
parameters
configFile : string
name of file with settings
"""
path = self._issue.strftime("%Y%m%d%H") + "/"
path += self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
WhfLog.info("LAYERING %s ", path)
srf.forcing(configFile, 'layer', 'HRRR', path, 'RAP', path)
WhfLog.info("DONE LAYERING file=%s", path)
Example 4: createStateFile
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def createStateFile(parms):
""" Called if there is no state file, look at data dirs and create state
Parameters
----------
parms: Parms
Parameter settings
Returns
-------
none
Writes out the state file after creating it
"""
WhfLog.info("Initializing")
# query directory and get newest model run file, then
# get all for that and previous issue time
cfs = df.DataFiles(parms._cfsDir, parms._maxFcstHourCfs, "CFS")
cfs.setNewestFiles(parms._hoursBackCfs)
for f in cfs._content:
f.debugPrint("Newest files: CFS")
state = State("")
state.initialize(cfs)
# maybe back up and regrid that entire issue time
# maybe redo this exact set of inputs only
# maybe do nothing
# maybe do all of them..for now do nothing as its easiest, just move on
#files = hrrr.getFnames()
#for f in files:
# regridHRRR(f)
#files = rap.getFnames()
#for f in files:
# regridRAP(f)
#files = mrms.getFnames()
#for f in files:
# regridMRMS(f)
# write out file
state.write(parms._stateFile)
Example 5: createStateFile
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def createStateFile(parms, fileType):
""" Called if there is no state file, look at data dirs and create state
Parameters
----------
parms: Parms
Parameter settings
fileType: str
'HRRR', ...
Returns
-------
none
Writes out the state file after creating it
"""
WhfLog.info("Initializing")
# query each directory and get newest model run file for each, then
# get all for that and previous issue time
data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
data.setNewestFiles(parms._hoursBack)
for f in data._content:
f.debugPrint("Newest files: " + fileType)
state = State("")
state.initialize(data, fileType)
# maybe back up and regrid that entire issue time
# maybe redo this exact set of inputs only
# maybe do nothing
# maybe do all of them..for now do nothing as its easiest, just move on
# write out file (at least try to)
state.write(parms._stateFile, fileType)
Example 6: _passthroughRap
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def _passthroughRap(self, parms):
""" Perform pass through of RAP data as if it were layered
NOTE: Add some error status catching for return
Parameters
----------
parms : Parms
parameters
"""
# lots of hardwires here
ymdh = self._issue.strftime("%Y%m%d%H")
path = ymdh + "/"
fname = self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
fnameOut = self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1"
path += fname
WhfLog.setData('RAP')
WhfLog.info("LAYERING (Passthrough) %s ", path)
# if not there, create the directory to put the file into
fullPath = parms._layerDir + "/"
fullPath += ymdh
if not os.path.exists(fullPath):
os.makedirs(fullPath)
if not os.path.isdir(fullPath):
WhfLog.error("%s is not a directory", fullPath)
else:
# create copy command and do it
cmd = "cp " + parms._rapDir
cmd += "/" + path
cmd += " " + fullPath
cmd += "/"
cmd += fnameOut
WhfLog.info(cmd)
os.system(cmd)
WhfLog.info("LAYERING (Passthrough) %s complete", path)
WhfLog.setData('RAP/HRRR')
Example 7: regridCFS
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def regridCFS(parmFile, cfsFname):
"""Invoke CFS regridding (see Long_Range_Forcing.py)
Parameters
----------
parmFile : str
name of param file
cfsFname: str
name of file to regrid and downscale, with yyyymmdd parent dir
Returns
-------
None
"""
WhfLog.info("REGRIDDING CFS DATA, file=%s", cfsFname)
try:
lrf.forcing(parmFile, cfsFname)
except:
WhfLog.info("ERROR REGRIDDING CFS DATA, file=%s", cfsFname)
raise
WhfLog.info("DONE REGRIDDING CFS DATA, file=%s", cfsFname)
Example 8: forcing
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def forcing(config, action, prod, file):
"""Peforms the action on the given data
product and corresponding input file.
Args:
config (string) : Config file name
action (string): Supported actions are:
'regrid' - regrid and downscale
prod (string): The first product [mandatory option]:
(MRMS, HRRR or RAP)
file (string): The file name (full path not necessary,
this is derived from the Python config/
param file and the YYYYMMDD portion of
the file name).
Returns:
None Performs the indicated action on the
files based on the type of product and
any other relevant information provided
by the Python config/param file,
wrf_hydro_forcing.parm
"""
# Read the parameters from the config/param file.
parser = SafeConfigParser()
parser.read(config)
# Set up logging, environments, etc.
forcing_config_label = "Anal_Assim"
whf.initial_setup(parser, forcing_config_label)
# Convert the action to lower case
# and the product name to upper case
# for consistent checking
action_requested = action.lower()
product_data_name = prod.upper()
# For analysis and assimilation, only 0hr, 3hr forecast fields from HRRR/RAP
# are necessary. 3hr forecast files are already regridded and downscaled
# from the short-range configuration, so only 0hr forecast files are regridded/downscaled
# here. In addition, MRMS data will be regridded, when available.
if action == "regrid":
(date, modelrun, fcsthr) = whf.extract_file_info(file)
# Usually check for forecast range, but only 0, 3 hr forecast/analysis data used
# Check for HRRR, RAP, MRMS products.
WhfLog.info("Regridding and Downscaling for %s", product_data_name)
if fcsthr == 0 and prod == "HRRR":
downscale_dir = parser.get("downscaling", "HRRR_downscale_output_dir_0hr")
try:
regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
except FilenameMatchError:
WhfLog.error("Unexpected filename format encountered while regridding 0hr HRRR")
raise
except NCLError:
WhfLog.error("NCL error encountered while regridding 0hr HRRR")
raise
try:
whf.downscale_data(product_data_name, regridded_file, parser, False, False, zero_process=True)
except FilenameMatchError:
WhfLog.error("Unexpected filename format encountered while downscaling 0hr HRRR")
raise
except NCLError:
WhfLog.error("NCL error encountered while downscaling 0hr HRRR")
raise
# Move downscaled file to staging area where triggering will monitor
match = re.match(r".*/([0-9]{10})/([0-9]{12}.LDASIN_DOMAIN1.nc)", regridded_file)
if match:
full_dir = downscale_dir + "/" + match.group(1)
full_finished_file = full_dir + "/" + match.group(2)
# File should have been created in downscale_data step.
try:
whf.file_exists(full_finished_file)
except UnrecognizedCommandError:
WhfLog.error("File move failed for regridded/downscaled 0hr HRRR , filename format unexpected")
raise
try:
whf.move_to_finished_area(parser, prod, full_finished_file, zero_move=True)
except:
WhfLog.error("Unsupported/unrecognized command encountered while moving file to finished area.")
raise
else:
WhfLog.error("File name format is unexpected")
raise FilenameMatchError("File name format is unexpected")
elif fcsthr == 0 and prod == "RAP":
downscale_dir = parser.get("downscaling", "RAP_downscale_output_dir_0hr")
try:
regridded_file = whf.regrid_data(product_data_name, file, parser, False, zero_process=True)
except NCLError:
WhfLog.error("NCL error while regridding 0hr RAP")
raise
except FilenameMatchError:
WhfLog.error("Unexpected filename format encountered, cannot regrid 0hr RAP")
raise
#......... (remaining code omitted) .........
Example 9: anal_assim_layer
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
#......... (preceding code omitted) .........
mrms_ds_dir
+ "/"
+ validDate.strftime("%Y%m%d%H")
+ "/"
+ validDate.strftime("%Y%m%d%H")
+ "00.LDASIN_DOMAIN1.nc"
)
hrrrBiasPath = qpe_parm_dir + "/HRRR_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
hrrrWgtPath = qpe_parm_dir + "/HRRR_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
mrmsBiasPath = (
qpe_parm_dir + "/MRMS_radonly_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km-sm60.grb2"
)
mrmsWgtPath = qpe_parm_dir + "/MRMS_radonly_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
rapBiasPath = qpe_parm_dir + "/RAPD_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
rapWgtPath = qpe_parm_dir + "/RAPD_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
# Sanity checking on parameter data
try:
whf.file_exists(hrrrBiasPath)
whf.file_exists(hrrrWgtPath)
whf.file_exists(mrmsBiasPath)
whf.file_exists(mrmsWgtPath)
whf.file_exists(rapBiasPath)
whf.file_exists(rapWgtPath)
except MissingFileError:
WhfLog.error("Missing file encountered while checking parameter data for AA")
raise
# Compose output file paths
LDASIN_path_tmp = tmp_dir + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1_TMP.nc"
LDASIN_path_final = out_path + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1"
# Perform layering/combining depending on processing path.
if process == 1: # RAP only
WhfLog.info(
"Layering and Combining RAP only for cycle date: "
+ cycleDate.strftime("%Y%m%d%H")
+ " valid date: "
+ validDate.strftime("%Y%m%d%H")
)
# Check for existence of input files
try:
whf.file_exists(rap0Path)
whf.file_exists(rap3Path)
except MissingFileError:
WhfLog.error("Missing RAP files for layering")
raise
elif process == 2: # HRRR and RAP only
WhfLog.info(
"Layering and Combining RAP and HRRR for cycle date: "
+ cycleDate.strftime("%Y%m%d%H")
+ " valid date: "
+ validDate.strftime("%Y%m%d%H")
)
# Check for existence of input files
try:
whf.file_exists(rap0Path)
whf.file_exists(rap3Path)
whf.file_exists(hrrr0Path)
whf.file_exists(hrrr3Path)
except MissingFileError:
WhfLog.error("Missing RAP or HRRR files for layering")
raise
elif process == 3: # HRRR, RAP, and MRMS
WhfLog.info(
"Layering and Combining RAP/HRRR/MRMS for cycle date: "
Example 10: regrid
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def regrid(fname, fileType, configFile):
"""Invoke regridding/downscaling
Parameters
----------
fname: str
name of file to regrid and downscale, with yyyymmdd parent dir
fileType: str
HRRR, RAP, ... string
configFile : str
configuration file with all settings
Returns
-------
None
"""
WhfLog.info("REGRIDDING %s DATA, file=%s", fileType, fname)
try:
if (fileType == 'HRRR'):
srf.forcing(configFile, 'regrid', 'HRRR', fname[9:])
# special case, if it is a 0 hour forecast, do double regrid
regridIfZeroHr(configFile, fileType, fname)
elif (fileType == 'RAP'):
srf.forcing(configFile, 'regrid', 'RAP', fname[9:])
# special case, if it is a 0 hour forecast, do double regrid
regridIfZeroHr(configFile, fileType, fname)
elif (fileType == 'GFS'):
mrf.forcing(configFile, 'regrid', 'GFS', fname[9:])
elif (fileType == 'MRMS'):
aaf.forcing(configFile, 'regrid', 'MRMS', fname[9:])
else:
WhfLog.info("ERROR REGRIDDING %s DATA, file=%s", fileType, fname)
raise InvalidArgumentError("Unknown file type " + fileType)
except ZeroHourReplacementError as z:
WhfLog.info("ERROR REGRIDDING: %s", z)
WhfLog.info("Remove this forecast from list of to do forecasts")
except:
WhfLog.info("ERROR REGRIDDING %s DATA, file=%s", fileType, fname)
raise
WhfLog.info("DONE REGRIDDING %s DATA, file=%s", fileType, fname)
Example 11: forcing
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def forcing(configFile, action, prod, file, prod2=None, file2=None):
"""Peforms the action on the given data
product and corresponding input file.
Args:
configFile (string): name of file with settings
action (string): Supported actions are:
'regrid' - regrid and downscale
'bias' - bias correction
(requires two
products and two files)
'layer' - layer (requires two
products and two files)
prod (string): The first product [mandatory option]:
(GFS)
file (string): The file name (full path not necessary,
this is derived from the Python config/
param file and the YYYYMMDD portion of
the file name).
prod2 (string): The second product (????), default
is None. Required for layering.
file2 (string): The second file name, required for
layering, default is None.
Returns:
None Performs the indicated action on the
files based on the type of product and
any other relevant information provided
by the Python config/param file,
wrf_hydro_forcing.parm
"""
# Read the parameters from the config/param file.
parser = SafeConfigParser()
try:
parser.read(configFile)
except (NoSectionErrorException, DuplicateSectionErrorException,\
DuplicateOptionErrorException,MissingSectionHeaderErrorException,\
ParsingErrorException) as e:
raise
# Set up logging, environments, etc.
forcing_config_label = 'Medium_Range'
whf.initial_setup(parser,forcing_config_label)
# Extract the date, model run time, and forecast hour from the file name
# Use the fcsthr to process only the files that have a fcst hour less than
# the max fcst hr defined in the param/config file.
# Convert the action to lower case
# and the product name to upper case
# for consistent checking
action_requested = action.lower()
product_data_name = prod.upper()
regridded_dir = parser.get('regridding','GFS_output_dir')
downscale_dir = parser.get('downscaling','GFS_downscale_output_dir')
finished_downscale_dir = parser.get('downscaling','GFS_finished_output_dir')
final_dir = parser.get('layering','medium_range_output')
if action == 'regrid':
(date,modelrun,fcsthr) = whf.extract_file_info(file)
# Determine whether this current file lies within the forecast range
# for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
# Skip if this file has a forecast hour greater than the max indicated in the
# parm/config file.
in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)
if in_fcst_range:
# Check for RAP or GFS data products. If this file is
# a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
# with the file from the previous model run and the same valid
# time. This is necessary because there are missing variables
# in the 0hr forecasts (e.g. precip rate for RAP and radiation
# in GFS).
WhfLog.info("Regridding and Downscaling for %s", product_data_name)
# Determine if this is a 0hr forecast for RAP data (GFS is also missing
# some variables for 0hr forecast, but GFS is not used for Medium Range
# forcing). We will need to substitute this file for the downscaled
# file from a previous model run with the same valid time.
# We only need to do this for downscaled files, as the Medium Range
# forcing files that are regridded always get downscaled and we don't want
# to do this for both the regridding and downscaling.
if fcsthr == 0 and prod == 'GFS':
WhfLog.info("Regridding (ignoring f0 GFS files) %s: ", file )
try:
regridded_file = whf.regrid_data(product_data_name, file, parser, True)
except (FilenameMatchError,NCLError,MissingFileError) as e:
WhfLog.error('Failure:regridding of GFS (ignore 0hr fcst) file: ' + file)
WhfLog.error(e)
raise
try:
whf.downscale_data(product_data_name,regridded_file, parser, True, True)
except (MissingFileError, SystemCommandError,\
NCLError) as e:
WhfLog.error('Downscaling GFS failed: ' + str(e))
#......... (remaining code omitted) .........
Example 12: forcing
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def forcing(configFile,file_in):
""" Args:
1.) configFile (string): The config file with all
the settings.
2.) file (string): The file name. The full path is
not necessary as full paths will be derived from
parameter directory paths and datetime information.
Returns:
None - Performs indicated bias correction, regridding,
and downscaling of CFSv2 data. Any errors are
trapped and passed back to the driver.
"""
WhfLog.debug("file_in = %s", file_in)
# Obtain CFSv2 forcing engine parameters.
parser = SafeConfigParser()
parser.read(configFile)
# Set up logging environments, etc.
forcing_config_label = "Long_Range"
try:
Whf.initial_setup(parser,forcing_config_label)
except:
raise
out_dir = parser.get('layering','long_range_output')
tmp_dir = parser.get('bias_correction','CFS_tmp_dir')
if (not df.makeDirIfNeeded(out_dir)):
raise MissingDirectoryError('Dir %s cannot be created', out_dir)
if (not df.makeDirIfNeeded(tmp_dir)):
raise MissingDirectoryError('Dir %s cannot be created', tmp_dir)
# Define CFSv2 cycle date and valid time based on file name.
(cycleYYYYMMDD,cycleHH,fcsthr,em) = Whf.extract_file_info_cfs(file_in)
em_str = str(em)
# Pull path to NCL bias correction module file. Export this as an
# environmental variable NCL refers to later.
nclBiasMod = parser.get('exe','CFS_bias_correct_mod')
os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod
# Establish datetime objects
dateCurrent = datetime.datetime.today()
dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
month=int(cycleYYYYMMDD[4:6]),
day=int(cycleYYYYMMDD[6:8]),
hour=cycleHH)
dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
datetime.timedelta(seconds=fcsthr*3600)
# Determine if this is a 0hr forecast file or not.
if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
fFlag = 1
else:
fFlag = 0
# Establish final output directories to hold 'LDASIN' files used for
# WRF-Hydro long-range forecasting. If the directory does not exist,
# create it.
out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")
try:
Whf.mkdir_p(out_path)
except:
raise
in_fcst_range = Whf.is_in_fcst_range("CFSv2",fcsthr,parser)
if in_fcst_range:
# First, bias-correct CFSv2 data and generate hourly files
# from six-hour forecast
WhfLog.info("Bias correcting for CFSv2 cycle: " + \
dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
" CFSv2 forecast time: " + dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
try:
Whf.bias_correction('CFSV2',file_in,dateCycleYYYYMMDDHH,
dateFcstYYYYMMDDHH,parser, em = em)
except (MissingFileError,NCLError):
raise
# Second, regrid to the conus IOC domain
# Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
# generated from bias-correction and call the regridding to go to the conus domain.
if fFlag == 1:
begCt = 6
endCt = 7
else:
begCt = 1
endCt = 7
for hour in range(begCt,endCt):
dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)
fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
em_str.zfill(2) + ".nc"
WhfLog.info("Regridding CFSv2 to conus domain for cycle: " + \
dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
#......... (remaining code omitted) .........
Example 13: main
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def main(argv):
# User must pass the config file into the main driver.
configFile = argv[0]
if not os.path.exists(configFile):
print('ERROR forcing engine config file not found.')
return 1
# read in fixed main params
parms = parmRead(configFile)
newestT = ""
newestT1 = df.newestIssueTime(parms._hrrrDir)
if (newestT):
if (newestT1):
if (newestT1 > newestT):
newestT = newestT1
else:
newestT = newestT1
newestT1 = df.newestIssueTime(parms._rapDir)
if (newestT):
if (newestT1):
if (newestT1 > newestT):
newestT = newestT1
else:
newestT = newestT1
newestT1 = df.newestIssueTime(parms._hrrr0hrDir)
if (newestT):
if (newestT1):
if (newestT1 > newestT):
newestT = newestT1
else:
newestT = newestT1
newestT1 = df.newestIssueTime(parms._rap0hrDir)
if (newestT):
if (newestT1):
if (newestT1 > newestT):
newestT = newestT1
else:
newestT = newestT1
newestT1 = df.newestIssueTime(parms._mrmsDir)
if (newestT):
if (newestT1):
if (newestT1 > newestT):
newestT = newestT1
else:
newestT = newestT1
if (not newestT):
WhfLog.debug("No data")
return 0
# if there is not a state file, create one now using newest
if (not os.path.exists(parms._stateFile)):
state = State()
WhfLog.info("Initializing")
state.initialize(parms, newestT)
state.write(parms._stateFile)
# Normal processing situation
#WhfLog.debug("Look for Layering....")
# read in state
state2 = State()
state2.initFromStateFile(parms._stateFile)
#state2.debugPrint()
if state2._empty:
# error return here
return 0
# check for new issue time
if (state2.isNewModelIssueTime(newestT)):
WhfLog.info("Re-Initializing state, new model issue time %s", newestT)
state2.initialize(parms, newestT)
# update availability
state2.setCurrentModelAvailability(parms, configFile)
# write out final state
state2.write(parms._stateFile)
return 0
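The repeated if/else blocks in this main simply keep the newest issue time found across several input directories. A hedged, self-contained sketch of that selection logic (in the example, the lookup function would be df.newestIssueTime, which is assumed to return an empty string when a directory has no data):
def newest_issue_time_across(dirs, newest_fn):
    # Return the newest issue time found in any of `dirs`, or "" if none
    # of them contain data; newest_fn(d) returns "" or a comparable string.
    times = [t for t in (newest_fn(d) for d in dirs) if t]
    return max(times) if times else ""

# e.g. newestT = newest_issue_time_across(
#          [parms._hrrrDir, parms._rapDir, parms._hrrr0hrDir,
#           parms._rap0hrDir, parms._mrmsDir], df.newestIssueTime)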
Example 14: forcing
# Required import: import WhfLog [as alias]
# or: from WhfLog import info [as alias]
def forcing(configFile, action, prod, file, prod2=None, file2=None):
"""Peforms the action on the given data
product and corresponding input file.
Args:
configFile (string): The config file with all the settings
action (string): Supported actions are:
'regrid' - regrid and downscale
'bias' - bias correction
(requires two
products and two files)
'layer' - layer (requires two
products and two files)
prod (string): The first product [mandatory option]:
(HRRR or RAP)
file (string): The file name (full path not necessary,
this is derived from the Python config/
param file and the YYYYMMDD portion of
the file name).
prod2 (string): The second product (RAP or HRRR), default
is None. Required for layering.
file2 (string): The second file name, required for
layering, default is None.
Returns:
None Performs the indicated action on the
files based on the type of product and
any other relevant information provided
by the Python config/param file,
wrf_hydro_forcing.parm
"""
# Read the parameters from the config/param file.
parser = SafeConfigParser()
parser.read(configFile)
forcing_config_label = "Short Range"
try:
whf.initial_setup(parser,forcing_config_label)
except Exception as e:
raise
# Extract the date, model run time, and forecast hour from the file name
# Use the fcsthr to process only the files that have a fcst hour less than
# the max fcst hr defined in the param/config file.
# Convert the action to lower case
# and the product name to upper case
# for consistent checking
action_requested = action.lower()
product_data_name = prod.upper()
if action == 'regrid':
# Get the finished directory locations for the relevant product.
if prod == 'RAP':
regridded_dir = parser.get('regridding', 'RAP_output_dir')
downscale_dir = parser.get('downscaling', 'RAP_downscale_output_dir')
finished_downscale_dir = parser.get('downscaling', 'RAP_finished_output_dir')
downscale_input_dir = parser.get('downscaling', 'RAP_data_to_downscale')
elif prod == 'HRRR':
regridded_dir = parser.get('regridding', 'HRRR_output_dir')
downscale_dir = parser.get('downscaling', 'HRRR_downscale_output_dir')
finished_downscale_dir = parser.get('downscaling', 'HRRR_finished_output_dir')
downscale_input_dir = parser.get('downscaling', 'HRRR_data_to_downscale')
(date,modelrun,fcsthr) = whf.extract_file_info(file)
# Determine whether this current file lies within the forecast range
# for the data product (e.g. if processing RAP, use only the 0hr-18hr forecasts).
# Skip if this file has a forecast hour greater than the max indicated in the
# parm/config file.
in_fcst_range = whf.is_in_fcst_range(prod, fcsthr, parser)
if in_fcst_range:
# Check for RAP or GFS data products. If this file is
# a 0 hr fcst and is RAP or GFS, substitute each 0hr forecast
# with the file from the previous model run and the same valid
# time. This is necessary because there are missing variables
# in the 0hr forecasts (e.g. precip rate for RAP and radiation
# in GFS).
WhfLog.info("Regridding and Downscaling for: "+ product_data_name)
# Determine if this is a 0hr forecast for RAP data (GFS is also missing
# some variables for 0hr forecast, but GFS is not used for Short Range
# forcing). We will need to substitute this file for the downscaled
# file from a previous model run with the same valid time.
# We only need to do this for downscaled files, as the Short Range
# forcing files that are regridded always get downscaled and we don't want
# to do this for both the regridding and downscaling.
if fcsthr == 0 and prod == 'RAP':
WhfLog.info("Regridding, ignoring f0 RAP files " )
try:
regridded_file = whf.regrid_data(product_data_name, file, parser, True)
except FilenameMatchError:
WhfLog.error('file name format is unexpected')
raise
#......... (remaining code omitted) .........