This article collects typical usage examples of the Python function netCDF4.date2num. If you are wondering exactly how date2num is used, how to call it in your own code, or simply want to see it in working programs, the curated snippets below should help.
Twenty code examples of the date2num function are shown below, drawn from open-source projects and sorted roughly by popularity.
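Before the collected examples, here is a minimal, self-contained sketch of the basic idea behind date2num: it converts datetime objects into numeric offsets relative to a CF-style units string, and num2date converts them back. The units string and dates below are arbitrary illustrations, not taken from any of the projects cited later.

from datetime import datetime, timedelta
from netCDF4 import date2num, num2date

# CF-style time units: numeric values count hours from this reference epoch
units = 'hours since 2000-01-01 00:00:00'
calendar = 'standard'

# four datetimes, 6 hours apart
dates = [datetime(2000, 1, 1) + timedelta(hours=6 * i) for i in range(4)]

# datetime objects -> numeric offsets (0, 6, 12, 18 hours since the epoch)
nums = date2num(dates, units, calendar=calendar)

# ...and back again to datetime-like objects
print(nums)
print(num2date(nums, units, calendar=calendar))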
Example 1: subset_merra_data
# imports required by this snippet
import numpy as np
from datetime import datetime
from netCDF4 import Dataset, date2num, num2date

def subset_merra_data(url, llat=23., ulat=51., llon=-130., rlon=-65., year=2010):
    '''
    Function to slice MERRA Reanalysis Precipitation data over particular region
    (default region is centered over the contiguous U.S.) for a specified year.
    It returns a monthly and summertime average daily rainfall for plotting purposes.
    '''
    nc = Dataset(url)
    time = nc.variables['time']
    lon = nc.variables['lon'][:]
    lat = nc.variables['lat'][:]
    # indices for slicing the data over particular year and region
    strt_time = int(date2num(datetime(year, 1, 1), time.units) - time[0])
    stp_time = int(date2num(datetime(year, 12, 31), time.units) - time[0])
    dates = num2date(time[strt_time:stp_time], time.units)
    llat_ind = np.argwhere(lat == llat)[0, 0]
    ulat_ind = np.argwhere(lat == ulat)[0, 0]
    llon_ind = np.argwhere(lon >= llon)[0, 0]
    rlon_ind = np.argwhere(lon >= rlon)[0, 0]
    # downloading sliced rainfall data
    precip = nc.variables['prectot'][strt_time:stp_time + 1, llat_ind:ulat_ind, llon_ind:rlon_ind]
    precip = 24 * 60 * 60 * precip  # converting to mm/day
    lon = lon[llon_ind:rlon_ind]
    lat = lat[llat_ind:ulat_ind]
    # computing monthly averages
    m_precip = []
    mm = [dd.month for dd in dates]
    for months in np.arange(1, 13):
        m_precip.append(precip[np.argwhere(mm == months)].mean(axis=0)[0])
    m_precip = np.array(m_precip)
    # summertime average daily rainfall for 2-D plotting over map
    jja_precip = m_precip[5:8, :, :].mean(axis=0)
    return lat, lon, m_precip, jja_precip
Author: tarunvee | Project: HW | Lines: 32 | Source: netcdf_basemap.py
Example 2: get_numtime
def get_numtime(self, arr):
    """
    :param arr: An array of ``datetime``-like objects to convert to numeric time.
    :type arr: :class:`numpy.ndarray`
    :returns: An array of numeric values with same shape as ``arr``.
    :rtype: :class:`numpy.ndarray`
    """
    arr = np.atleast_1d(arr)
    try:
        ret = nc.date2num(arr, str(self.units), calendar=self.calendar)
    except (ValueError, TypeError):
        # Special behavior for conversion of time units with months.
        if self._has_months_units:
            ret = get_num_from_months_time_units(arr, self.units, dtype=None)
        else:
            # Odd behavior in netcdftime objects? Try with datetime objects.
            flat_arr = arr.flatten()
            fill = np.zeros(flat_arr.shape, dtype=object)
            for idx, element in enumerate(flat_arr):
                fill[idx] = datetime.datetime(element.year, element.month, element.day, element.hour,
                                              element.minute, element.second, element.microsecond)
            fill = fill.reshape(arr.shape)
            ret = np.atleast_1d(nc.date2num(fill, str(self.units), calendar=self.calendar))
    return ret
Author: NCPP | Project: ocgis | Lines: 25 | Source: temporal.py
Example 3: dictionary_of_data_to_netcdf
def dictionary_of_data_to_netcdf(self, ncFileName, dataDictionary, timeBounds, timeStamp=None, posCnt=None):
    rootgrp = nc.Dataset(ncFileName, 'a')
    lowerTimeBound = timeBounds[0]
    upperTimeBound = timeBounds[1]
    if timeStamp is None:
        timeStamp = lowerTimeBound + (upperTimeBound - lowerTimeBound) / 2
    # time
    date_time = rootgrp.variables['time']
    if posCnt is None:
        posCnt = len(date_time)
    date_time[posCnt] = nc.date2num(timeStamp, date_time.units, date_time.calendar)
    # time bounds
    time_bounds = rootgrp.variables['time_bounds']
    time_bounds[posCnt, 0] = nc.date2num(lowerTimeBound, date_time.units, date_time.calendar)
    time_bounds[posCnt, 1] = nc.date2num(upperTimeBound, date_time.units, date_time.calendar)
    shortVarNameList = dataDictionary.keys()
    for shortVarName in shortVarNameList:
        varField = dataDictionary[shortVarName]
        # flip variable if necessary (to follow cf_convention)
        if self.netcdf_y_orientation_follow_cf_convention:
            varField = np.flipud(varField)
        # the variable
        rootgrp.variables[shortVarName][posCnt, :, :] = varField
    rootgrp.sync()
    rootgrp.close()
Author: edwinkost | Project: extreme_value_analysis | Lines: 30 | Source: output_netcdf_cf_convention.py
Example 4: setUp
def setUp(self):
    self.standardtime = self.TestTime(datetime(1950, 1, 1), 366, 24, "hours since 1900-01-01", "standard")
    self.file = tempfile.mktemp(".nc")
    f = Dataset(self.file, "w")
    f.createDimension("time", None)
    time = f.createVariable("time", float, ("time",))
    time.units = "hours since 1900-01-01"
    time[:] = self.standardtime[:]
    f.createDimension("time2", 1)
    time2 = f.createVariable("time2", "f8", ("time2",))
    time2.units = "days since 1901-01-01"
    self.first_timestamp = datetime(2000, 1, 1)
    time2[0] = date2num(self.first_timestamp, time2.units)
    ntimes = 21
    f.createDimension("record", ntimes)
    time3 = f.createVariable("time3", numpy.int32, ("record",))
    time3.units = "seconds since 1970-01-01 00:00:00"
    date = datetime(2037, 1, 1, 0)
    dates = [date]
    for ndate in range(ntimes - 1):
        date += (ndate + 1) * timedelta(hours=1)
        dates.append(date)
    time3[:] = date2num(dates, time3.units)
    f.close()
Author: lesliekim | Project: netcdf4-python | Lines: 25 | Source: tst_netcdftime.py
Example 5: convert_osu_file
def convert_osu_file(file, sheet_names):
    for s in sheet_names:
        name = str(s)
        print('\nConverting sheet name: %s' % name)
        f = pd.read_excel(file, sheetname=name, skiprows=1)
        for i, j in f.iterrows():
            ctd_sn = int(j['SN'])
            ctd_uid = 'CGINS-CTDGVM-' + '{0:05d}'.format(ctd_sn)
            print(ctd_uid)
            ctd_inst = 'CTDGVM'
            ctd_caldate_str = str(j['Cal Date'])[0:10].replace('-', '')
            ctd_caldate_num = int(nc.date2num(j['Cal Date'], 'seconds since 1970-01-01')) * 1000
            ctd_sdir = os.path.join(dir, ctd_inst)
            create_dir(ctd_sdir)
            write_csv(ctd_uid, ctd_caldate_str, ctd_caldate_num, ctd_sn, ctd_sdir)
            do_sn = int(j['SN.3'])
            do_uid = 'CGINS-DOSTAM-' + '{0:05d}'.format(do_sn)
            print(do_uid)
            do_inst = 'DOSTAM'
            do_caldate_str = str(j['Cal Date.3'])[0:10].replace('-', '')
            do_caldate_num = int(nc.date2num(j['Cal Date.3'], 'seconds since 1970-01-01')) * 1000
            do_sdir = os.path.join(dir, do_inst)
            create_dir(do_sdir)
            write_csv(do_uid, do_caldate_str, do_caldate_num, do_sn, do_sdir)
Author: ooi-data-review | Project: check_ooi_nc | Lines: 26 | Source: glider_sensor_calsheets.py
Example 6: setUp
def setUp(self):
    self.standardtime = self.TestTime(datetime(1950, 1, 1), 366, 24,
                                      'hours since 1900-01-01', 'standard')
    self.file = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
    f = Dataset(self.file, 'w')
    f.createDimension('time', None)
    time = f.createVariable('time', float, ('time',))
    time.units = 'hours since 1900-01-01'
    time[:] = self.standardtime[:]
    f.createDimension('time2', 1)
    time2 = f.createVariable('time2', 'f8', ('time2',))
    time2.units = 'days since 1901-01-01'
    self.first_timestamp = datetime(2000, 1, 1)
    time2[0] = date2num(self.first_timestamp, time2.units)
    ntimes = 21
    f.createDimension("record", ntimes)
    time3 = f.createVariable("time3", numpy.int32, ("record", ))
    time3.units = "seconds since 1970-01-01 00:00:00"
    date = datetime(2037, 1, 1, 0)
    dates = [date]
    for ndate in range(ntimes - 1):
        date += (ndate + 1) * timedelta(hours=1)
        dates.append(date)
    time3[:] = date2num(dates, time3.units)
    f.close()
Author: ckhroulev | Project: netcdf4-python | Lines: 26 | Source: tst_netcdftime.py
Example 7: _check_dates_outside
def _check_dates_outside(ifile, start_date, end_date):
    """ Checks if the comparison data is outside of the dates for the plot
        Returns True if the dates of the data are completely outside of the
        desired dates.
        Returns False if the dates overlap at all, but prints a warning if
        it is only a subset.
    """
    # Load data from file into Dataset object
    nc = Dataset(ifile, 'r')
    nc_time = nc.variables['time']
    try:
        cal = nc_time.calendar
    except:
        cal = 'standard'
    # convert dates to datetime object
    start = datetime.datetime(*year_mon_day(start_date))
    end = datetime.datetime(*year_mon_day(end_date))
    # convert datetime objects to integers
    start = date2num(start, nc_time.units, calendar=cal)
    end = date2num(end, nc_time.units, calendar=cal)
    # get start and end dates of file
    compstart = nc_time[:][0]
    compend = nc_time[:][-1]
    # make comparison
    if compstart > end or compend < start:
        return True
    elif compstart > start or compend < end:
        with open('logs/log.txt', 'a') as outfile:
            outfile.write('WARNING: Comparison data does not cover entire time period... Used subset\n')
    return False
Author: swartn | Project: validate | Lines: 34 | Source: data_loader.py
Example 8: test_select_nc
def test_select_nc(self):
    f = Dataset(self.file, 'r')
    nutime = f.variables['time']
    dates = [datetime(1950, 1, 2, 6), datetime(1950, 1, 3), datetime(1950, 1, 3, 18)]
    t = date2index(dates, nutime, select='before')
    assert_equal(t, [1, 2, 2])
    t = date2index(dates, nutime, select='after')
    assert_equal(t, [2, 2, 3])
    t = date2index(dates, nutime, select='nearest')
    assert_equal(t, [1, 2, 3])
    # Test dates outside the support with select
    t = date2index(datetime(1949, 12, 1), nutime, select='nearest')
    assert_equal(t, 0)
    t = date2index(datetime(1978, 1, 1), nutime, select='nearest')
    assert_equal(t, 365)
    # Test dates outside the support with before
    self.assertRaises(
        ValueError, date2index, datetime(1949, 12, 1), nutime, select='before')
    t = date2index(datetime(1978, 1, 1), nutime, select='before')
    assert_equal(t, 365)
    # Test dates outside the support with after
    t = date2index(datetime(1949, 12, 1), nutime, select='after')
    assert_equal(t, 0)
    self.assertRaises(
        ValueError, date2index, datetime(1978, 1, 1), nutime, select='after')
    # test microsecond and millisecond units
    unix_epoch = "milliseconds since 1970-01-01T00:00:00Z"
    d = datetime(2038, 1, 19, 3, 14, 7)
    millisecs = int(
        date2num(d, unix_epoch, calendar='proleptic_gregorian'))
    assert_equal(millisecs, (2 ** 32 / 2 - 1) * 1000)
    unix_epoch = "microseconds since 1970-01-01T00:00:00Z"
    microsecs = int(date2num(d, unix_epoch))
    assert_equal(microsecs, (2 ** 32 / 2 - 1) * 1000000)
    # test microsecond accuracy in date2num/num2date roundtrip
    # note: microsecond accuracy lost for time intervals greater
    # than about 270 years.
    units = 'microseconds since 1776-07-04 00:00:00-12:00'
    dates = [datetime(1962, 10, 27, 6, 1, 30, 9001),
             datetime(1993, 11, 21, 12, 5, 25, 999),
             datetime(1995, 11, 25, 18, 7, 59, 999999)]
    times2 = date2num(dates, units)
    dates2 = num2date(times2, units)
    for date, date2 in zip(dates, dates2):
        assert_equal(date, date2)
    f.close()
Author: ckhroulev | Project: netcdf4-python | Lines: 57 | Source: tst_netcdftime.py
Example 9: fix_time
def fix_time(cgfile, time):
    times = cgfile.createVariable('time', 'double', ('time', ))
    times.units = 'seconds since 1970-01-01 00:00:00.0'
    times.calendar = 'gregorian'
    times.standard_name = 'time'
    print("time here is")
    print(time)
    print(netCDF4.date2num(time, units=times.units, calendar=times.calendar))
    times[0] = netCDF4.date2num(time, units=times.units,
                                calendar=times.calendar)
    print(times)
Author: vanandel | Project: pyart | Lines: 11 | Source: nc_utils.py
Example 10: setup_getfeatureinfo
def setup_getfeatureinfo(self, ncd, variable_object, request, location=None):
    location = location or 'face'
    try:
        latitude = request.GET['latitude']
        longitude = request.GET['longitude']
        # Find closest cell or node (only node for now)
        if location == 'face':
            tree = rtree.index.Index(self.face_tree_root)
        elif location == 'node':
            tree = rtree.index.Index(self.node_tree_root)
        else:
            raise NotImplementedError("No RTree for location '{}'".format(location))
        nindex = list(tree.nearest((longitude, latitude, longitude, latitude), 1, objects=True))[0]
        closest_x, closest_y = tuple(nindex.bbox[2:])
        geo_index = nindex.object
    except BaseException:
        raise
    finally:
        tree.close()
    # Get time indexes
    time_var_name = find_appropriate_time(variable_object, ncd.get_variables_by_attributes(standard_name='time'))
    time_var = ncd.variables[time_var_name]
    if hasattr(time_var, 'calendar'):
        calendar = time_var.calendar
    else:
        calendar = 'gregorian'
    start_nc_num = round(nc4.date2num(request.GET['starting'], units=time_var.units, calendar=calendar))
    end_nc_num = round(nc4.date2num(request.GET['ending'], units=time_var.units, calendar=calendar))
    all_times = time_var[:]
    start_nc_index = bisect.bisect_right(all_times, start_nc_num)
    end_nc_index = bisect.bisect_right(all_times, end_nc_num)
    try:
        all_times[start_nc_index]
    except IndexError:
        start_nc_index = all_times.size - 1
    try:
        all_times[end_nc_index]
    except IndexError:
        end_nc_index = all_times.size - 1
    if start_nc_index == end_nc_index:
        if start_nc_index > 0:
            start_nc_index -= 1
        elif end_nc_index < all_times.size:
            end_nc_index += 1
    return_dates = nc4.num2date(all_times[start_nc_index:end_nc_index], units=time_var.units, calendar=calendar)
    return geo_index, closest_x, closest_y, start_nc_index, end_nc_index, return_dates
Author: brianmckenna | Project: sci-wms | Lines: 53 | Source: netcdf.py
Example 11: download_time
def download_time(self, dataset):
    """
    """
    self.logger.debug("Downloading time")
    t = dataset['time'][:]
    if 't_ini' not in self.cfg['limits']:
        if 'd_ini' in self.cfg['limits']:
            assert type(self.cfg['limits']['d_ini']) == datetime, \
                "limits:d_ini must be a datetime"
            d = date2num(self.cfg['limits']['d_ini'],
                         dataset['time'].attributes['units'])
            self.cfg['limits']['t_ini'] = np.nonzero(t >= d)[0][0]
        else:
            self.cfg['limits']['t_ini'] = 0
        self.logger.debug("Setting t_ini: %s" % self.cfg['limits']['t_ini'])
    if 't_step' not in self.cfg['limits']:
        self.cfg['limits']['t_step'] = 1
        self.logger.debug("Setting t_step: %s" % self.cfg['limits']['t_step'])
    if 't_fin' not in self.cfg['limits']:
        if 'd_fin' in self.cfg['limits']:
            assert type(self.cfg['limits']['d_fin']) == datetime, \
                "limits:d_fin must be a datetime"
            d = date2num(self.cfg['limits']['d_fin'],
                         dataset['time'].attributes['units'])
            self.cfg['limits']['t_fin'] = np.nonzero(t > d)[0][0]
        else:
            self.cfg['limits']['t_fin'] = dataset['time'].shape[0]
        self.logger.debug("Setting t_fin: %s" % self.cfg['limits']['t_fin'])
    t_ini = self.cfg['limits']['t_ini']
    t_fin = self.cfg['limits']['t_fin']
    t_step = self.cfg['limits']['t_step']
    # ----
    data = {}
    #
    #from coards import from_udunits
    #t0=datetime(1950,1,1)
    #if (re.match('^hours since \d{4}-\d{2}-\d{2}$',dataset_h['time'].attributes['units'])):
    #if (re.match('^hours since 1950-01-01',self.dataset['h']['time'].attributes['units'])):
    #    t = self.dataset['h']['time'][t_ini:t_fin:t_step].tolist()
    #    data['datetime'] = numpy.array([t0+timedelta(hours=h) for h in t])
    #else:
    #    self.logger.error("Problems interpreting the time")
    t = dataset['time'][t_ini:t_fin:t_step].tolist()
    self.nc.createDimension('time', len(range(t_ini, t_fin, t_step)))
    nct = self.nc.createVariable('time', 'f8', ('time', ))
    nct[:] = t
    nct.units = dataset['time'].attributes['units']
Author: castelao | Project: PyAVISO | Lines: 52 | Source: aviso.py
Example 12: __next_update_out_data
def __next_update_out_data(self):
    """ Determine the count for when the next write should occur """
    # ------------------------------------------------------------ #
    # If monthly, write at (YYYY,MM,1,0,0)
    # b0 is first timestep of next period
    # b1 is end of last timestep of next period
    b0 = self._time_ord
    self._begtime = b0
    if self._nhtfrq == 0:
        if self._timestamp.month == 12:
            b1 = date2num(datetime(self._timestamp.year + 1, 2, 1),
                          TIMEUNITS, calendar=self._calendar)
        else:
            b1 = date2num(datetime(self._timestamp.year,
                                   self._timestamp.month + 1, 1),
                          TIMEUNITS, calendar=self._calendar)
    # If some hours in the future
    elif self._nhtfrq < 0:
        b1 = b0 - (self._nhtfrq / HOURSPERDAY)
    # If some dts in the future
    else:
        b1 = b0 + (self._nhtfrq * self._dt / SECSPERDAY)
    # ------------------------------------------------------------ #

    # ------------------------------------------------------------ #
    # Get the number of timesteps and datestamp for the next write
    # next_ord is the ord_time when the write will happen
    self._update_count = int(round((b1 - b0) / (self._dt / SECSPERDAY)))
    # ------------------------------------------------------------ #

    # ------------------------------------------------------------ #
    # Get next file names and timeord
    if self._avgflag == 'I':
        self._write_ord = b1
        self.filename = num2date(b1, TIMEUNITS,
                                 calendar=self._calendar).strftime(self._fname_format)
    else:
        self._time_bnds = np.array([[b0, b1]])
        self._write_ord = np.average(self._time_bnds)
        self.filename = num2date(b0, TIMEUNITS,
                                 calendar=self._calendar).strftime(self._fname_format)
    self.rest_filename = num2date(b1, TIMEUNITS,
                                  calendar=self._calendar).strftime(self._fname_format)
    # ------------------------------------------------------------ #

    # ------------------------------------------------------------ #
    # Set the count to zero
    self._count = 0
Author: tcchiao | Project: RVIC | Lines: 52 | Source: history.py
Example 13: check_dates
def check_dates(beginDate, endDate):
    begin_DT = datetime.strptime(beginDate, '%Y-%m-%dT%H:%M:%SZ')
    beginDT = int(nc.date2num(begin_DT, 'seconds since 1970-01-01') * 1000)
    try:
        end_DT = datetime.strptime(endDate, '%Y-%m-%dT%H:%M:%SZ')
        endDT = int(nc.date2num(end_DT, 'seconds since 1970-01-01') * 1000)
        if endDT >= beginDT:
            return beginDT, endDT
        else:
            raise Exception('beginDate (%s) is after endDate (%s)' % (begin_DT, end_DT))
    except ValueError:
        endDT = ''
        return beginDT, endDT
Author: ooi-data-review | Project: check_ooi_nc | Lines: 13 | Source: push_annotations.py
Example 14: read_variable
def read_variable(source_file, variable, date, date_to=None):
    """Gets images from a netCDF file.

    Reads the image for a specific date. If date_to is given, it will return
    multiple images in a multidimensional numpy.ndarray

    Parameters
    ----------
    source_file : str
        Path to source file.
    variable : str
        Requested variable of image.
    date : datetime.datetime
        Date of the image, start date of data cube if date_to is set.
    date_to : datetime.date, optional
        End date of data cube to slice from NetCDF file.

    Returns
    -------
    image : numpy.ndarray
        Image for a specific date.
    lon : numpy.array
        Longitudes of the image.
    lat : numpy.array
        Latitudes of the image.
    metadata : dict of strings
        Metadata from source netCDF file.
    """
    with Dataset(source_file, 'r', format='NETCDF4') as nc:
        times = nc.variables['time']
        lon = nc.variables['lon'][:]
        lat = nc.variables['lat'][:]
        var = nc.variables[variable]
        metadata = {}
        for attr in var.ncattrs():
            if attr[0] != '_' and attr != 'scale_factor':
                metadata[attr] = var.getncattr(attr)
        numdate = date2num(date, units=times.units, calendar=times.calendar)
        if date_to is None:
            image = var[np.where(times[:] == numdate)[0][0]]
        else:
            numdate_to = date2num(date_to, units=times.units,
                                  calendar=times.calendar)
            subset = np.where((times[:] >= numdate) & (times[:] <= numdate_to))
            image = var[subset]
    return image, lon, lat, metadata
Author: TUW-GEO | Project: poets | Lines: 51 | Source: netcdf.py
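As a usage illustration of read_variable above, the short sketch below slices one month of images from a file; the path '/tmp/rainfall.nc' and the variable name 'precip' are hypothetical placeholders, not part of the original project.

from datetime import datetime

# hypothetical file path and variable name, for illustration only
image, lon, lat, metadata = read_variable('/tmp/rainfall.nc', 'precip',
                                          datetime(2010, 6, 1),
                                          date_to=datetime(2010, 6, 30))
print(image.shape, lon.shape, lat.shape, metadata)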
Example 15: subset
def subset(self, **kwargs):
    newargs = {}
    for name, value in kwargs.items():
        if name in self.coords:
            coord = self.coords[name]
            #print "subset ", name, " using ", coord[:], coord._subset, value
            if type(value) == tuple:
                start, stop = value
            else:
                start = stop = value
            # try and coerce into datetimes
            try:
                start = netCDF4.date2num(parser.parse(start), coord.attributes['units'])
            except:
                pass
            try:
                stop = netCDF4.date2num(parser.parse(stop), coord.attributes['units'])
            except:
                pass
            print("subset start, stop ", start, stop)
            # Get the actual coordinate values
            coord_vals = coord[:]
            # If we have dates then we try and convert coordinate values to dates:
            if type(start) == datetime.datetime:
                try:
                    coord_vals = netCDF4.num2date(coord_vals, units=coord.units, calendar=coord.calendar)
                except:
                    pass
            start_index = np.argmin(np.abs(coord_vals - start))
            stop_index = np.argmin(np.abs(coord_vals - stop))
            # We might need to swap around
            if stop_index < start_index:
                tmp = stop_index
                stop_index = start_index
                start_index = tmp
            newargs[name] = slice(start_index, stop_index + 1)
    # print 'newargs = ', newargs
    return self.isubset_copy(**newargs)
Author: csag-uct | Project: climstats | Lines: 51 | Source: dataset.py
Example 16: plot_sta
def plot_sta(self, ax, vname, station, date, label):
    print(vname, station, date, label)
    if label == 'Free':
        nc = self.free
        if self.t_free is None:
            ocean_time = nc.variables['ocean_time'][:]
            time = netCDF4.date2num(date, self.sta_JST)
            if type(time) == np.int64:
                t = np.where(ocean_time == time)[0][0]
            else:
                t0 = np.where(ocean_time == time[0])[0][0]
                t1 = np.where(ocean_time == time[1])[0][0]
                t = np.arange(t0, t1)
            self.t_free = t
        else:
            t = self.t_free
    elif label == 'Assi':
        nc = self.assi
        if self.t_assi is None:
            ocean_time = nc.variables['ocean_time'][:]
            time = netCDF4.date2num(date, self.sta_JST)
            t = np.where(ocean_time == time)[0][0]
            self.t_assi = t
        else:
            t = self.t_assi
    if vname == 'DIN':
        NH4 = nc.variables['NH4'][t, station-1, :]
        NO3 = nc.variables['NO3'][t, station-1, :]
        var = NH4 + NO3
    elif vname == 'PON':
        LDeN = nc.variables['LdetritusN'][t, station-1, :]
        SDeN = nc.variables['SdetritusN'][t, station-1, :]
        var = LDeN + SDeN
    elif vname == 'POP':
        LDeP = nc.variables['LdetritusP'][t, station-1, :]
        SDeP = nc.variables['SdetritusP'][t, station-1, :]
        var = LDeP + SDeP
    else:
        var = nc.variables[vname][t, station-1, :]
    line = {'Free': '--', 'Assi': '-'}
    if type(t) == np.int64:
        depth = self.calculate_depth(nc, t, station)
        ax.plot(var, depth, line[label], label=label)
    else:
        depth = self.calculate_depth(nc, t[0], station)
        #ax.errorbar(np.mean(var, axis=0), depth, xerr=np.std(var, axis=0), fmt=line[label], label=label)
        mean = np.mean(var, axis=0)
        std = np.std(var, axis=0)
        #ax.fill_betweenx(depth, mean-std, mean+std, label=label)
        #ax.plot(mean, depth, line[label])
        special.errorfill(mean, depth, xerr=std, label=label, ax=ax, alpha_fill=0.1)
Author: okadate | Project: romspy | Lines: 51 | Source: profile.py
Example 17: get_climatologic_field
def get_climatologic_field(self, varname="mrro", gcm="", rcm="",
                           start_year=None, end_year=None,
                           months=None
                           ):
    """
    for time t: start_year <= t <= end_year
    """
    mfds = MFDataset("{0}/{1}-{2}/current/{3}_*.nc".format(self.folder_with_nc_data, gcm, rcm, varname))
    self.lon2d = mfds.variables[self.lon_name][:].transpose()
    self.lat2d = mfds.variables[self.lat_name][:].transpose()
    self._init_kd_tree()
    cache_file = self._get_clim_cache_file_path(varname=varname, gcm=gcm, rcm=rcm,
                                                start_year=start_year, end_year=end_year, months=months)
    cache_file = os.path.join(self.cache_files_folder, cache_file)
    if os.path.isfile(cache_file):
        f = open(cache_file)
        mfds.close()
        return pickle.load(f)
    t = mfds.variables["time"]
    t_units = t.units
    t_calendar = t.calendar
    t_start = date2num(datetime(start_year, 1, 1), t_units, calendar=t_calendar)
    t_end = date2num(datetime(end_year + 1, 1, 1), t_units, calendar=t_calendar)
    t = t[:]
    t_sel = t[(t_start <= t) & (t < t_end)]
    dates_sel = num2date(t_sel, t_units, calendar=t_calendar)
    bool_vect = np.array([x.month in months for x in dates_sel], dtype=np.bool)
    data_sel = mfds.variables[varname][np.where((t_start <= t) & (t < t_end))[0], :, :]
    # save results to a cache file for reuse
    result = data_sel[bool_vect, :, :].mean(axis=0).transpose()
    pickle.dump(result, open(cache_file, "w"))
    mfds.close()
    return result  # because in the file the axes are inversed
Author: guziy | Project: RPN | Lines: 50 | Source: manager.py
Example 18: prepare_nc
def prepare_nc(trgFile, timeList, x, y, metadata, logger, units='Days since 1900-01-01 00:00:00',
               calendar='gregorian', Format="NETCDF4", complevel=9, zlib=True, least_significant_digit=None):
    """
    This function prepares a NetCDF file with given metadata, for a certain year, daily basis data
    The function assumes a gregorian calendar and a time unit 'Days since 1900-01-01 00:00:00'
    """
    import datetime as dt

    logger.info('Setting up netcdf output: ' + trgFile)
    startDayNr = netCDF4.date2num(timeList[0].replace(tzinfo=None), units=units, calendar=calendar)
    endDayNr = netCDF4.date2num(timeList[-1].replace(tzinfo=None), units=units, calendar=calendar)
    time = arange(startDayNr, endDayNr + 1)
    nc_trg = netCDF4.Dataset(trgFile, 'w', format=Format, zlib=zlib, complevel=complevel)
    logger.info('Setting up dimensions and attributes. Steps: ' + str(len(timeList)) + ' lat: ' + str(len(y)) + " lon: " + str(len(x)))
    if len(time) == 1:
        nc_trg.createDimension('time', 1)
    else:
        nc_trg.createDimension('time', 0)  # NrOfDays*8
    nc_trg.createDimension('lat', len(y))
    nc_trg.createDimension('lon', len(x))
    DateHour = nc_trg.createVariable('time', 'f8', ('time',), fill_value=-9999., zlib=zlib, complevel=complevel)
    DateHour.units = units
    DateHour.calendar = calendar
    DateHour.standard_name = 'time'
    DateHour.long_name = 'time'
    DateHour.axis = 'T'
    DateHour[:] = time
    y_var = nc_trg.createVariable('lat', 'f4', ('lat',), fill_value=-9999., zlib=zlib, complevel=complevel)
    y_var.standard_name = 'latitude'
    y_var.long_name = 'latitude'
    y_var.units = 'degrees_north'
    y_var.axis = 'Y'
    x_var = nc_trg.createVariable('lon', 'f4', ('lon',), fill_value=-9999., zlib=zlib, complevel=complevel)
    x_var.standard_name = 'longitude'
    x_var.long_name = 'longitude'
    x_var.units = 'degrees_east'
    x_var.axis = 'X'
    y_var[:] = y
    x_var[:] = x
    projection = nc_trg.createVariable('projection', 'c')
    projection.long_name = 'wgs84'
    projection.EPSG_code = 'EPSG:4326'
    projection.proj4_params = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    projection.grid_mapping_name = 'latitude_longitude'
    # now add all attributes from user-defined metadata
    for attr in metadata:
        nc_trg.setncattr(attr, metadata[attr])
    nc_trg.sync()
    nc_trg.close()
Author: charlibrown33 | Project: wflow | Lines: 50 | Source: wf_netcdfio.py
Example 19: add_time_bounds
def add_time_bounds(self, delta=None, position=None):
    self.nc.createDimension("bounds")
    time_bounds = self.nc.createVariable('{}_bounds'.format(self.time_axis_name), "f8", ("time", "bounds",), chunksizes=(1000, 2,))
    time_bounds.units = "seconds since 1970-01-01T00:00:00Z"
    time_bounds.calendar = "gregorian"
    time_objs = netCDF4.num2date(self.time[:], units=self.time.units, calendar=self.time.calendar)
    bounds_kwargs = dict(units=time_bounds.units, calendar=time_bounds.calendar)
    if position == "start":
        time_bounds[:] = np.asarray(zip(self.time[:], netCDF4.date2num(time_objs + delta, **bounds_kwargs)))
    elif position == "middle":
        time_bounds[:] = np.asarray(zip(netCDF4.date2num(time_objs - delta/2, **bounds_kwargs), netCDF4.date2num(time_objs + delta/2, **bounds_kwargs)))
    elif position == "end":
        time_bounds[:] = np.asarray(zip(netCDF4.date2num(time_objs - delta, **bounds_kwargs), self.time[:]))
Author: lukecampbell | Project: pyaxiom | Lines: 15 | Source: timeseries.py
Example 20: read_values
def read_values(self, fieldname, slices=None):
    """
    Read the values of a field.

    `slices` is optional. When provided, give for each dimension the
    corresponding python slice object to subset this dimension. Only the
    dimensions to be subsetted need to be provided, by default the full
    dimension length is read.

    .. code-block:: python

        # extracting a subset of a field with slices
        data = fd.read_values('owiWindSpeed', slices={'row': slice(10, 20), 'cell': slice(30, 40)})

    :param fieldname: name of the field
    :type fieldname: str
    :param slices: dimensions slices, when reading a subset only for some
        dimensions
    :type slices: Dict<str, slice>
    """
    if fieldname == 'time':
        # accounts for virtual variable
        timeval = self.get_start_time()
        return numpy.ma.array([date2num(timeval, TIME_CONVENTION)])
    else:
        return super(SAFEOCNNCFile, self).read_values(fieldname,
                                                      slices=slices)
Author: lelou6666 | Project: PySOL | Lines: 30 | Source: safeocnncfile.py
Note: the netCDF4.date2num examples in this article were compiled from code hosted on platforms such as GitHub and MSDocs, with snippets selected from open-source projects contributed by their respective authors. Copyright of the source code remains with the original authors; consult each project's license before redistributing or reusing it. Do not republish without permission.