This article collects typical usage examples of the Python netCDF4.num2date function. If you are wondering exactly how num2date is used, how to call it, or what real-world examples look like, the curated code samples here should help.
A total of 15 code examples of num2date are shown below, sorted roughly by popularity.
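Before the project examples, here is a minimal, self-contained sketch of the basic num2date / date2num round trip; the units string and numeric values are chosen purely for illustration:

from netCDF4 import num2date, date2num

units = 'hours since 1900-01-01 00:00:00'
calendar = 'standard'

# Convert numeric offsets into datetime-like objects...
dates = num2date([0, 24, 48], units=units, calendar=calendar)
print(dates)  # e.g. [1900-01-01 00:00:00, 1900-01-02 00:00:00, 1900-01-03 00:00:00]

# ...and back into numbers relative to the same epoch.
print(date2num(dates, units=units, calendar=calendar))  # e.g. [ 0. 24. 48.]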
Example 1: IsAdiacent
def IsAdiacent(self, Test):
    # print stc, ttc
    if self.ClimatologicalField:
        import netCDF4
        import datetime
        import sys
        # print self.TimeCells, Test.TimeCells
        stc = netCDF4.num2date(self.TimeCells, units='hours since 1900-01-01 00:00:00', calendar='standard')
        ttc = netCDF4.num2date(Test.TimeCells, units='hours since 1900-01-01 00:00:00', calendar='standard')
        print >>sys.stderr, 'WARNING 11 : leap year for climatology...'
        # print 'stc', stc
        # print 'ttc', ttc
        # if (stc[0].year == ttc[0].year and stc[1].year == ttc[1].year):
        # stc[1].year = ttc[0].year
        if stc[-1][1].month == 2 and stc[-1][1].day == 29:
            il_day = 28
        else:
            il_day = stc[-1][1].day
        nstc = datetime.datetime(ttc[0][0].year, stc[-1][1].month, il_day, stc[-1][1].hour, stc[-1][1].minute)
        # print nstc, stc[0][0].year, ttc[-1][1].month, ttc[-1][1].day
        if ttc[-1][1].month == 2 and ttc[-1][1].day == 29:
            il_day = 28
        else:
            il_day = ttc[-1][1].day
        nttc = datetime.datetime(stc[0][0].year, ttc[-1][1].month, il_day, ttc[-1][1].hour, ttc[-1][1].minute)
        # print 'nstc', nstc
        # print 'nttc', nttc
        # if stc[0][0].year <= ttc[0][0].year and (nstc == ttc[0][0] or nttc == stc[0][0]):
        if (nstc == ttc[0][0] and stc[0][0].year <= ttc[0][0].year) or (nttc == stc[0][0] and ttc[0][0].year <= stc[0][0].year):
            # if self.TimeCells[0]+Test.TimeCells[1]-self.TimeCells[0] == Test.TimeCells[0]:
            # print 'vero', stc[0][0].year, ttc[0][0].year
            return True
    else:
        if self.TimeCells[-1] == Test.TimeCells[0] or self.TimeCells[0] == Test.TimeCells[1]:
            return True
    return False
Example 2: plot_filtered_timeseries
def plot_filtered_timeseries(pressure, pressure_units, pressure_time, time_units,
                             pressure_filtered, delay, figdir, n1, dayrissaga):
    # Very simple plot to show the evolution of the pressure
    hfmt = dates.DateFormatter('%d %B')
    time2plot = netCDF4.num2date(pressure_time, time_units)
    time2plot_filter = netCDF4.num2date(pressure_time - delay, time_units)

    fig = plt.figure(num=None, figsize=(14, 6))
    ax = fig.add_subplot(111)
    plt.plot(time2plot, pressure, 'k', lw=0.5, label='Raw signal')
    plt.plot(time2plot_filter[n1:], pressure_filtered[n1:], 'c', linewidth=2, zorder=2, label='Filtered signal')
    plt.axvline(x=dayrissaga, linewidth=3, color='r', alpha=0.5)
    plt.xlabel('Time')
    plt.ylabel(("Pressure\n (%s)" % (pressure_units)), ha='right', rotation=0)
    plt.legend()
    ax.xaxis.set_major_locator(dates.DayLocator())
    ax.xaxis.set_major_formatter(hfmt)
    plt.grid()
    plt.savefig(os.path.join(figdir, 'SantAntoni_timeseries_' + dayrissaga.strftime('%Y%m%d')))
    plt.close()

    fig = plt.figure(num=None, figsize=(14, 6))
    ax = fig.add_subplot(111)
    plt.plot(time2plot[n1 / 2:-n1 / 2], pressure[n1 / 2:-n1 / 2] - pressure_filtered[n1:], 'k', lw=0.5)
    plt.axvline(x=dayrissaga, linewidth=3, color='r', alpha=0.5)
    plt.xlabel('Time')
    plt.ylabel(("Pressure anomaly\n (%s)" % pressure_units), ha='right', rotation=0)
    ax.set_xlim(time2plot[0], time2plot[-1])
    ax.xaxis.set_major_locator(dates.DayLocator())
    ax.xaxis.set_major_formatter(hfmt)
    fig.autofmt_xdate()
    plt.grid()
    plt.savefig(os.path.join(figdir, 'SantAntoni_anomalies_' + dayrissaga.strftime('%Y%m%d')))
    plt.close()
Example 3: show_tbounds
def show_tbounds(t):

    print 'Start date: ', num2date(t[0], t.units)
    try:
        print 'End date: ', num2date(t[-1], t.units)
    except IndexError:
        print num2date(t[0], t.units)
Example 4: are_time_axis_the_same
def are_time_axis_the_same(filenames):
    # print "inside get times func"
    # print filenames
    times = {}
    for key in filenames:
        # print filenames[key]
        times[key] = getCoordinateVariable(netCDF.Dataset(filenames[key], "r+"), "Time")

    keys = times.keys()

    # if (len(times[keys[0]]) != len(times[keys[1]])):
    #     pass
    #     # return False
    # else:
    time_range = len(times[keys[0]]) if len(times[keys[0]]) > len(times[keys[1]]) else len(times[keys[1]])

    # print "using range %d" % time_range
    # print len(times[keys[0]])
    # print len(times[keys[1]])

    for x in range(time_range):
        time1 = datetime.datetime.strptime(
            netCDF.num2date(times[keys[0]][x], times[keys[0]].units, calendar="standard").isoformat(),
            "%Y-%m-%dT%H:%M:%S",
        )
        time2 = datetime.datetime.strptime(
            netCDF.num2date(times[keys[1]][x], times[keys[1]].units, calendar="standard").isoformat(),
            "%Y-%m-%dT%H:%M:%S",
        )
        # print time1, time2
        # print times[keys[0]][x], times[keys[1]][x]
        dif = time1 - time2
        # print dif
        if dif > timedelta.min:
            return False

    return True
Example 5: rcs_model
def rcs_model(winlen, modfile):
    from grid_tools import trim_time_jandec
    from netCDF4 import num2date
    from netCDF4 import date2num
    from scipy import ndimage
    from netcdf_tools import ncextractall
    from convert import mmd_mmm

    # Extract the model data and clip to the required start and end months
    modnc = ncextractall(modfile)
    mdata = modnc['pr']
    mdata = mdata*86400.  # convert to same units as obs
    mlon = modnc['lon']
    mlat = modnc['lat']
    mtime = modnc['time']
    time_u = modnc['time_units']
    if 'time_calendar' in modnc.keys():
        cal = modnc['time_calendar']
        mtime = num2date(mtime, units=time_u, calendar=cal)
    else:
        mtime = num2date(mtime, units=time_u)

    mdata, mtime = trim_time_jandec(mdata, mtime)
    mdata = mmd_mmm(mdata)
    mdata = ndimage.filters.uniform_filter(mdata, size=[winlen, 1, 1])

    # Trim first or last values if required as they are unrepresentative
    trim = int(winlen/2)
    mdata = mdata[trim:, :, :]
    if winlen % 2 == 0:
        trim = trim - 1
    mdata = mdata[:-trim, :, :]

    return mdata, mlat, mlon
Example 6: get_time_from_dim
def get_time_from_dim(cls, time_var):
    """Get min/max from a NetCDF time variable and convert to datetime"""
    ndim = len(time_var.shape)
    if ndim == 0:
        ret_val = time_var.item()
        res = ret_val, ret_val
    elif ndim == 1:
        # NetCDF Users' Guide states that when time is a coordinate variable,
        # it should be monotonically increasing or decreasing with no
        # repeated variables. Therefore, first and last elements for a
        # vector should correspond to start and end time or end and start
        # time respectively. See Section 2.3.1 of the NUG
        res = time_var[0], time_var[-1]
    else:
        # FIXME: handle multidimensional time variables. Perhaps
        # take the first and last element of time variable in the first
        # dimension and then take the min and max of the resulting values
        return None, None

    # if not > 1d, return the min and max elements found
    min_elem, max_elem = np.min(res), np.max(res)
    if hasattr(time_var, 'calendar'):
        return num2date([min_elem, max_elem], time_var.units,
                        time_var.calendar)
    else:
        return num2date([min_elem, max_elem], time_var.units)
Example 7: _init_fields
def _init_fields(self, nc_dataset):
    nc_vars = nc_dataset.variables
    lons = nc_vars["lon"][:]
    lats = nc_vars["lat"][:]

    if lons.ndim == 1:
        lats2d, lons2d = np.meshgrid(lats, lons)
    elif lons.ndim == 2:
        lats2d, lons2d = lats, lons
    else:
        raise NotImplementedError("Cannot handle {}-dimensional coordinates".format(lons.ndim))

    self.lons2d, self.lats2d = lons2d, lats2d

    self.times_var = nc_vars["time"]
    self.times_num = nc_vars["time"][:]

    if hasattr(self.times_var, "calendar"):
        self.times = num2date(self.times_num, self.times_var.units, self.times_var.calendar)
    else:
        self.times = num2date(self.times_num, self.times_var.units)

    if not self.lazy:
        self.var_data = nc_vars[self.var_name][:]
        if nc_vars[self.var_name].shape[1:] != self.lons2d.shape:
            print("nc_vars[self.var_name].shape = {}".format(nc_vars[self.var_name].shape))
            self.var_data = np.transpose(self.var_data, axes=[0, 2, 1])

    x_in, y_in, z_in = lat_lon.lon_lat_to_cartesian(self.lons2d.flatten(), self.lats2d.flatten())
    self.kdtree = cKDTree(list(zip(x_in, y_in, z_in)))
Example 8: check_time_extents
def check_time_extents(self, ds):
    """
    Check that the values of time_coverage_start/time_coverage_end approximately match the data.
    """
    if not (hasattr(ds, 'time_coverage_start') and hasattr(ds, 'time_coverage_end')):
        return

    # Parse the ISO 8601 formatted dates
    try:
        t_min = dateparse(ds.time_coverage_start)
        t_max = dateparse(ds.time_coverage_end)
    except:
        return Result(BaseCheck.MEDIUM,
                      False,
                      'time_coverage_extents_match',
                      ['time_coverage attributes are not formatted properly. Use the ISO 8601:2004 date format, preferably the extended format.'])

    timevar = cfutil.get_time_variable(ds)

    if not timevar:
        return Result(BaseCheck.MEDIUM,
                      False,
                      'time_coverage_extents_match',
                      ['Could not find time variable to test extent of time_coverage_start/time_coverage_end, see CF-1.6 spec chapter 4.4'])

    # Time should be monotonically increasing, so we make that assumption here
    # so we don't have to download THE ENTIRE ARRAY
    try:
        # num2date returns as naive date, but with time adjusted to UTC
        # we need to attach timezone information here, or the date
        # subtraction from t_min/t_max will assume that a naive timestamp is
        # in the same time zone and cause erroneous results.
        # Pendulum uses UTC by default, but we are being explicit here
        time0 = pendulum.instance(num2date(ds.variables[timevar][0],
                                           ds.variables[timevar].units), 'UTC')
        time1 = pendulum.instance(num2date(ds.variables[timevar][-1],
                                           ds.variables[timevar].units), 'UTC')
    except:
        return Result(BaseCheck.MEDIUM,
                      False,
                      'time_coverage_extents_match',
                      ['Failed to retrieve and convert times for variables %s.' % timevar])

    start_dt = abs(time0 - t_min)
    end_dt = abs(time1 - t_max)

    score = 2
    msgs = []

    if start_dt > timedelta(hours=1):
        msgs.append("Date time mismatch between time_coverage_start and actual "
                    "time values %s (time_coverage_start) != %s (time[0])" % (t_min.isoformat(), time0.isoformat()))
        score -= 1
    if end_dt > timedelta(hours=1):
        msgs.append("Date time mismatch between time_coverage_end and actual "
                    "time values %s (time_coverage_end) != %s (time[N])" % (t_max.isoformat(), time1.isoformat()))
        score -= 1

    return Result(BaseCheck.MEDIUM,
                  (score, 2),
                  'time_coverage_extents_match',
                  msgs)
Example 9: doflow_fb
def doflow_fb(first_frame, second_frame, winSize=(5, 5), filter_len=10, sig_min=150, n_iter=40, levels=1):
    im0 = copy.deepcopy(first_frame.fields['IR_filt']['data'])
    im0[np.where(im0 < sig_min)] = sig_min
    im1 = copy.deepcopy(second_frame.fields['IR_filt']['data'])
    im1[np.where(im1 < sig_min)] = sig_min
    sim0 = (im0 - im0.min())*(im0.max()/(im0.max()-im0.min()))
    sim1 = (im1 - im1.min())*(im1.max()/(im1.max()-im1.min()))
    u, v = get_optic_flow_fb(sim0[0],
                             sim1[0],
                             winSize=winSize[0], n_iter=n_iter, levels=levels)
    t1 = netCDF4.num2date(second_frame.axes['time']['data'][0], units=second_frame.axes['time']['units'])
    t0 = netCDF4.num2date(first_frame.axes['time']['data'][0], units=first_frame.axes['time']['units'])
    dt = (t1 - t0).seconds
    dx = np.expand_dims(np.gradient(second_frame.fields['x']['data'])[1], 0)
    dy = np.expand_dims(np.gradient(second_frame.fields['y']['data'])[0], 0)
    u_fld = {'data': dt * ndimage.median_filter(u.reshape([1, u.shape[0], u.shape[1]]), filter_len)/dx,
             'units': 'pixels',
             'standard_name': 'disp',
             'long name': 'todo'}
    v_fld = {'data': dt * ndimage.median_filter(v.reshape([1, v.shape[0], v.shape[1]]), filter_len)/dy,
             'units': 'pixels',
             'standard_name': 'disp',
             'long name': 'todo'}
    return u_fld, v_fld
Example 10: get_monthly_time_slices
def get_monthly_time_slices(ncvar_time):
    '''
    Based on an input NetCDF4 time variable returns calendar appropriate monthly slices
    '''
    assert 'calendar' in ncvar_time.ncattrs(), "Time variable does not have a defined calendar"
    cal = ncvar_time.calendar

    assert 'units' in ncvar_time.ncattrs(), "Time variable must have 'units' attribute"
    units = ncvar_time.units

    assert len(ncvar_time.dimensions) == 1, "Time variable must be single dimension"

    slices = []
    d_start = num2date(ncvar_time[0], units, cal)
    current_month = d_start.month
    t_start = 0

    for i, val in enumerate(ncvar_time):
        d = num2date(val, units, cal)
        if d.month != current_month:
            slices.append(slice(t_start, i))
            t_start = i
            current_month = d.month

    slices.append(slice(t_start, i + 1))
    return slices
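As a rough usage sketch (not from the original project), a helper like get_monthly_time_slices could be applied to a dataset's time coordinate as follows; the file name and variable names here are assumptions for illustration:

from netCDF4 import Dataset

ds = Dataset('example.nc', 'r')    # assumed file with a CF-style 'time' coordinate
time_var = ds.variables['time']
precip = ds.variables['pr']        # assumed variable with dimensions (time, lat, lon)

for sl in get_monthly_time_slices(time_var):
    monthly_mean = precip[sl].mean(axis=0)    # one mean field per calendar month
    print(sl, monthly_mean.shape)

ds.close()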
Example 11: _get_old_hiwrap_time
def _get_old_hiwrap_time(fname, ncFile, Good_Indices):
    """
    Pull the time from HIWRAP file and convert to AWOT useable.
    The time structure is odd here (to me) and is in
    seconds since last Sunday.
    The assumption that the data is the 4th 'field' in the filename
    is required to make this work.
    """
    # Pull out the date, convert the date to a datetime friendly string
    # Adds dashes between year, month, and day
    yyyymmdd = fname.split("_")[3]

    # Find the date for Sunday previous and
    # check this (should be = 6 for Sunday)
    startday = int(yyyymmdd[6:8]) - int(divmod(
        ncFile.variables['time'][0], 24 * 3600)[0])
    if datetime.date(ncFile.variables['year'][:],
                     int(yyyymmdd[4:6]), startday).weekday() != 6:
        print("Time could be incorrect, check file to see if time units "
              "are 'computer time (sec from last Sunday at 12 am)'")
    StartDate = yyyymmdd[0:4] + '-' + yyyymmdd[4:6] + '-' + str(startday)

    # Create the time array
    # Now convert the time array into a datetime instance
    dtHrs = num2date(ncFile.variables['time'][
        Good_Indices], 'seconds since ' + StartDate + ' 00:00:00+0:00')
    # Now convert this datetime instance into a number of seconds since Epoch
    TimeSec = date2num(dtHrs, common.EPOCH_UNITS)
    # Now once again convert this data into a datetime instance
    Time_unaware = num2date(TimeSec, common.EPOCH_UNITS)
    Time = {'data': Time_unaware, 'units': common.EPOCH_UNITS,
            'title': 'Time', 'full_name': 'Time (UTC)'}
    return Time
Example 12: getdates
def getdates(f):
    """ Returns the years from a filename and directory path

    Parameters
    ----------
    string : name of file including path

    Returns
    -------
    string of start year
    string of end year
    """
    nc = Dataset(f, 'r')
    time = nc.variables['time_bnds'][:].squeeze()
    nc_time = nc.variables['time']
    try:
        cal = nc_time.calendar
    except:
        cal = 'standard'
    start = nc_time[:][0]
    end = nc_time[:][-1]
    start = num2date(start, nc_time.units, cal)
    end = num2date(end, nc_time.units, cal)
    start = start.year
    end = end.year
    return start, end
Example 13: runTest
def runTest(self):
    # Get the real dates
    # skip this until cftime pull request #55 is in a released
    # version (1.0.1?). Otherwise, fix for issue #808 breaks this
    if parse_version(cftime.__version__) >= parse_version('1.0.1'):
        dates = []
        for file in self.files:
            f = Dataset(file)
            t = f.variables['time']
            dates.extend(num2date(t[:], t.units, t.calendar))
            f.close()

    # Compare with the MF dates
    f = MFDataset(self.files, check=True)
    t = f.variables['time']
    mfdates = num2date(t[:], t.units, t.calendar)

    T = MFTime(t)
    assert_equal(len(T), len(t))
    assert_equal(T.shape, t.shape)
    assert_equal(T.dimensions, t.dimensions)
    assert_equal(T.typecode(), t.typecode())
    # skip this until cftime pull request #55 is in a released
    # version (1.0.1?). Otherwise, fix for issue #808 breaks this
    if parse_version(cftime.__version__) >= parse_version('1.0.1'):
        assert_array_equal(num2date(T[:], T.units, T.calendar), dates)
    assert_equal(date2index(datetime.datetime(1980, 1, 2), T), 366)
    f.close()
Example 14: read_nc
def read_nc(infile, varname, dimension=-1, is_time=0):
    '''Read a variable from a netCDF file

    Input:
        infile: input file path
        varname: variable name
        dimension: if < 0, read in all dimensions of the variable; if >= 0,
            only read in the [dimension]th of the variable (index starts from 0).
            For example, if the first dimension of the variable is time, and if
            dimension=2, then only reads in the 3rd time step.
        is_time: if the desired variable is time (1 for time; 0 for not time).
            If it is time, return an array of datetime objects.

    Return:
        var: a numpy array of the variable values (datetime objects if is_time=1)
    '''
    from netCDF4 import Dataset
    from netCDF4 import num2date

    nc = Dataset(infile, 'r')
    if is_time == 0:  # if not time variable
        if dimension < 0:
            var = nc.variables[varname][:]
        else:
            var = nc.variables[varname][dimension]
    if is_time == 1:  # if time variable
        time = nc.variables[varname]
        if hasattr(time, 'calendar'):  # if time variable has 'calendar' attribute
            if dimension < 0:
                var = num2date(time[:], time.units, time.calendar)
            else:
                var = num2date(time[dimension], time.units, time.calendar)
        else:  # if time variable does not have 'calendar' attribute
            if dimension < 0:
                var = num2date(time[:], time.units)
            else:
                var = num2date(time[dimension], time.units)
    nc.close()
    return var
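A short, hypothetical call pattern for read_nc is sketched below; the file name 'tas_monthly.nc' and the variable names are assumptions for illustration only:

tas = read_nc('tas_monthly.nc', 'tas')                       # full data array
times = read_nc('tas_monthly.nc', 'time', is_time=1)         # decoded datetime objects
first_step = read_nc('tas_monthly.nc', 'tas', dimension=0)   # first time step only
print(times[0], tas.shape)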
Example 15: get_pasap_plot_title
def get_pasap_plot_title(dset,
                         varname='hr24_prcp',
                         timestep=0,
                         ):
    """ Given an open pydap object, and some extra information, return a nice
    plot title.
    """
    header = "PASAP: Dynamical Seasonal Outlooks for the Pacific."
    subheader1 = "Outlook based on POAMA 1.5 CGCM adjusted for historical skill"
    subheader2 = "Experimental outlook for demonstration and research only"
    time_var = dset['time']

    if 'units' in time_var.attributes.keys():
        time_units = time_var.attributes['units']
    else:
        time_units = ''
    if 'units' in dset[varname].attributes.keys():
        units = dset[varname].attributes['units']
    else:
        units = ''

    valid_time = datetime.datetime.strftime(
        num2date(time_var[timestep], time_units), "%Y%m%d")
    start_date = datetime.datetime.strftime(
        num2date(dset['init_date'][0], time_units), "%Y%m%d")
    period_label = str(dset['time_label'][timestep])

    titlestring = header + '\n' \
        + subheader1 + '\n' \
        + subheader2 + '\n' \
        + "Variable: " + varname + ' (' + units + ')' + '\n' \
        + 'Model initialised ' + start_date + '\n'
    # + 'Forecast period: ' + period_label

    return titlestring