This article collects and summarizes typical usage examples of the Python function pydap.client.open_url. If you have been wondering what exactly open_url does and how to use it, the curated code samples below should help.
Below are 20 code examples of open_url, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
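Before diving into the examples, here is a minimal sketch of the typical open_url workflow. The public test endpoint and the SST variable below are assumptions used only for illustration; substitute any OPeNDAP dataset you have access to. The key point is that open_url returns a lazy dataset object, and data is downloaded only when a variable is sliced:

from pydap.client import open_url

# Hypothetical public test dataset; any reachable OPeNDAP endpoint works here.
dataset = open_url('http://test.opendap.org/dap/data/nc/coads_climatology.nc')
print(list(dataset.keys()))     # variable names exposed by the server
sst = dataset['SST']            # a lazy proxy; nothing downloaded yet
print(sst.shape)                # metadata is available without a transfer
subset = sst[0, 40:42, 60:62]   # slicing issues the actual DAP request

Most of the examples below follow this same pattern: build a constraint expression into the URL, or slice the returned proxy, so that only the needed subset travels over the network.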
Example 1: get_neracoos_wind_data
def get_neracoos_wind_data(url, id_s, id_e, id_max_url):
    """Get wind data from NERACOOS."""
    url1 = url + 'wind_speed[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0],wind_direction[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0]'
    database_s = open_url(url1)['wind_speed'][int(id_s):int(id_e)]
    database_d = open_url(url1)['wind_direction'][int(id_s):int(id_e)]
    depth = database_s['wind_depth']
    period = database_s['time']
    speed = database_s['wind_speed'][0:].tolist()
    # Times are days since the Modified Julian Date epoch (1858-11-17)
    period = num2date(period[0:] + date2num(dt.datetime(1858, 11, 17, 0, 0)))
    direction = database_d['wind_direction'][0:].tolist()
    period_str, wind_all = [], []
    for i in range(len(period)):  # convert format to list
        period_str.append(dt.datetime.strftime(period[i], '%Y-%m-%d-%H-%M'))
        wind_all.append([round(depth[0], 1), round(speed[i][0][0][0], 2), round(direction[i][0][0][0], 2)])
    wind, direction = [], []  # find bad data and delete it
    for i in range(len(wind_all)):
        wind.append(wind_all[i][1])
        direction.append(wind_all[i][2])
    id_bad = ml.find((np.array(wind) > 300) | (np.array(wind) < -1) |
                     (np.array(direction) < 0) | (np.array(direction) > 360))
    id_bad = list(id_bad)
    id_bad.reverse()  # delete from the end so earlier indices stay valid
    for m in id_bad:
        del period_str[m]
        del wind_all[m]
    return period_str, wind_all
Developer: jian-cui | Project: pyocean | Lines: 31 | Source: neracoos_def.py
Example 2: getsst
def getsst(second):
    # get the index of "second" from the url
    time_tuple = time.gmtime(second)  # derive the year from the epoch seconds
    year = time_tuple.tm_year
    if year < 1999 or year > 2010:
        print('Sorry, there might not be available data for this year')
        # WARNING: As of Jan 2012, this data is only stored for 1999-2010
    url1 = 'http://tashtego.marine.rutgers.edu:8080/thredds/dodsC/cool/avhrr/bigbight/' + str(year) + '?time[0:1:3269]'
    dataset1 = open_url(url1)
    times = list(dataset1['time'])
    # find the nearest image index
    index_second = int(round(np.interp(second, times, range(len(times)))))
    # get sst, time, lat, lon from the url
    url = 'http://tashtego.marine.rutgers.edu:8080/thredds/dodsC/cool/avhrr/bigbight/' + \
          str(year) + '?lat[0:1:1221],lon[0:1:1182],' + \
          'mcsst[' + str(index_second) + ':1:' + str(index_second) + \
          '][0:1:1221][0:1:1182]' + \
          ',time[' + str(index_second) + \
          ':1:' + str(index_second) + ']'
    try:
        dataset = open_url(url)
    except Exception:
        print("Please check your url! Cannot access dataset.")
        sys.exit(0)
    sst = dataset['mcsst'].mcsst
    time1 = dataset['time']
    lat = dataset['lat']
    lon = dataset['lon']
    return sst, time1, lat, lon
Developer: jian-cui | Project: pyocean | Lines: 31 | Source: getdata.py
Example 3: getcodar_ctl_id
def getcodar_ctl_id(model_option, url, datetime_wanted):
    if model_option == '1':
        dtime = open_url(url + '?time')
        dd = dtime['time']
        start = num2date(dd[0] + date2num(datetime.datetime(2009, 1, 1, 0, 0)))
        end = num2date(dd[-1] + date2num(datetime.datetime(2009, 1, 1, 0, 0)))
        print('This option has data from ' + start.strftime("%B %d, %Y") + ' to ' + end.strftime("%B %d, %Y"))
        id = datetime_wanted - date2num(datetime.datetime(2009, 1, 1, 0, 0))
        id = str(int(id))
    elif model_option == '6':
        dtime = open_url(url + '?time')
        dd = dtime['time']
        id = datetime_wanted - date2num(datetime.datetime(2006, 1, 1, 0, 0))
        id = str(int(id))
    else:
        dtime = open_url(url + '?time')
        ddd = [round(i, 7) for i in list(dtime['time'])]
        id = ml.find(np.array(ddd) == round(datetime_wanted - date2num(datetime.datetime(2001, 1, 1, 0, 0)), 7))
        for i in id:  # keep the (single) matching index as a string
            id = str(i)
    return id
Developer: xhx509 | Project: drifter_header | Lines: 30 | Source: sst_function.py
Example 4: _load
def _load(self, filename, elements, debug=False):
    """Loads data from *.nc and *.p files, or from an OpenDAP url"""
    # Loading pickle file
    if filename.endswith(".p"):
        with open(filename, "rb") as f:
            data = pkl.load(f)
        self._origin_file = data["Origin"]
        self.History = data["History"]
        if debug:
            print("Turn keys into attributes")
        self.Grid = ObjectFromDict(data["Grid"])
        self.Variables = ObjectFromDict(data["Variables"])
        try:
            if self._origin_file.startswith("http"):
                # Look for the file through an OpenDAP server
                print("Retrieving data through OpenDAP server...")
                self.Data = open_url(data["Origin"])
                # Create a fake attribute to be consistent with the rest of the code
                self.Data.variables = self.Data
            else:
                self.Data = self._load_nc(data["Origin"])
        except Exception:  # TR: need to specify the type of error here
            print("the original *.nc file has not been found")
    # Loading netcdf file
    elif filename.endswith(".nc"):
        if filename.startswith("http"):
            # Look for the file through an OpenDAP server
            print("Retrieving data through OpenDAP server...")
            self.Data = open_url(filename)
            # Create a fake attribute to be consistent with the rest of the code
            self.Data.variables = self.Data
        else:
            # Look for the file locally
            print("Retrieving data from " + filename + " ...")
            self.Data = self._load_nc(filename)
        # Metadata
        text = "Created from " + filename
        self._origin_file = filename
        self.History = [text]
        # Calling sub-class
        print("Initialisation...")
        try:
            self.Grid = _load_grid(self.Data, elements, self.History, debug=self._debug)
            self.Variables = _load_var(self.Data, elements, self.Grid, self.History, debug=self._debug)
        except MemoryError:
            print("---Data too large for machine memory---")
            print("Tip: use ax or tx during class initialisation")
            print("--- to use partial data")
            raise
    elif filename.endswith(".mat"):
        raise PyseidonError("---Functionality not yet implemented---")
    else:
        raise PyseidonError("---Wrong file format---")
Developer: GrumpyNounours | Project: PySeidon | Lines: 56 | Source: stationClass.py
Example 5: get_neracoos_current_data
def get_neracoos_current_data(url, id_s, id_e, id_max_url):
    """Get current data from NERACOOS."""
    url1 = (url + 'current_speed[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0],'
            + 'current_direction[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0],'
            + 'current_u[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0],'
            + 'current_v[0:1:' + id_max_url + '][0:1:0][0:1:0][0:1:0]')
    database_s = open_url(url1)['current_speed'][int(id_s):int(id_e)]
    database_d = open_url(url1)['current_direction'][int(id_s):int(id_e)]
    database_u = open_url(url1)['current_u'][int(id_s):int(id_e)]
    database_v = open_url(url1)['current_v'][int(id_s):int(id_e)]
    period = database_s['time']
    speed = database_s['current_speed'][0:].tolist()
    # Times are days since the Modified Julian Date epoch (1858-11-17)
    period = num2date(period[0:] + date2num(dt.datetime(1858, 11, 17, 0, 0)))
    direction = database_d['current_direction'][0:].tolist()
    u = database_u['current_u'][0:].tolist()
    v = database_v['current_v'][0:].tolist()
    period_str, current_all = [], []
    for i in range(len(period)):  # convert format to list
        period_str.append(dt.datetime.strftime(period[i], '%Y-%m-%d-%H-%M'))
        current_all.append([round(speed[i][0][0][0], 2), round(direction[i][0][0][0], 2),
                            round(u[i][0][0][0], 2), round(v[i][0][0][0], 2)])
    current, u, v, direction = [], [], [], []  # find bad data and delete it
    for i in range(len(current_all)):
        current.append(current_all[i][0])
        direction.append(current_all[i][1])
        u.append(current_all[i][2])
        v.append(current_all[i][3])
    id_bad = ml.find((np.array(current) > 200) | (np.array(current) < -1) |
                     (np.array(direction) < 0) | (np.array(direction) > 360) |
                     (np.array(u) < -200) | (np.array(u) > 200) |
                     (np.array(v) < -200) | (np.array(v) > 200))
    id_bad = list(id_bad)
    id_bad.reverse()  # delete from the end so earlier indices stay valid
    for m in id_bad:
        del period_str[m]
        del current_all[m]
    return period_str, current_all
Developer: jian-cui | Project: pyocean | Lines: 39 | Source: neracoos_def.py
Example 6: station_info
def station_info(station_id):
    from pydap.client import open_url
    url1 = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/stdmet/' + station_id + '/' + station_id + 'h9999.nc'
    # For ocean data instead: .../data/ocean/<id>/<id>o9999.nc
    url2 = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/stdmet/' + station_id + '/' + station_id + '.ncml'
    try:
        dataset = open_url(url1)
    except Exception:
        try:
            dataset = open_url(url2)
        except Exception:
            print('OPENDAP url not found: ' + station_id)
            return False
    return station_info_details(station_id, dataset)
Developer: ooiepe | Project: tss-python | Lines: 15 | Source: ndbc.py
Example 7: test_timeout
def test_timeout(sequence_type_data):
    """Test that timeout works properly"""
    TestDataset = DatasetType('Test')
    TestDataset['sequence'] = sequence_type_data
    TestDataset['byte'] = BaseType('byte', 0)
    application = BaseHandler(TestDataset)

    # Explicitly add latency on the devel server
    # to guarantee that it times out
    def wrap_mocker(func):
        def mock_add_latency(*args, **kwargs):
            time.sleep(1e-1)
            return func(*args, **kwargs)
        return mock_add_latency

    application = wrap_mocker(application)

    with LocalTestServer(application) as server:
        url = "http://0.0.0.0:%s/" % server.port

        # test open_url
        assert open_url(url) == TestDataset
        with pytest.raises(HTTPError) as e:
            open_url(url, timeout=1e-5)
        assert 'Timeout' in str(e)

        # test open_dods
        with pytest.raises(HTTPError):
            open_dods(url + '.dods?sequence', timeout=1e-5)
        assert 'Timeout' in str(e)

        # test sequenceproxy
        dataset = open_url(url)
        seq = dataset['sequence']
        assert isinstance(seq.data, SequenceProxy)
        # Change the timeout of the sequence proxy:
        seq.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            next(seq.iterdata())
        assert 'Timeout' in str(e)

        # test baseproxy
        dat = dataset['byte']
        assert isinstance(dat.data, BaseProxy)
        # Change the timeout of the base proxy:
        dat.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            dat[:]
        assert 'Timeout' in str(e)
Developer: jblarsen | Project: pydap | Lines: 48 | Source: test_server_devel.py
Example 8: read_opendap_index
def read_opendap_index(date=None, domain=None):
    url = parse_url(date)
    print(url[0])
    dataset = open_url(url[0])
    lats = dataset['lat'][:]
    lons = dataset['lon'][:]
    pres = dataset['pressure'][:] / 100.  # [hPa]
    lats_idx = np.where((lats > domain['latn']) &
                        (lats < domain['latx']))[0]
    lons_idx = np.where((lons > 360 + domain['lonn']) &
                        (lons < 360 + domain['lonx']))[0]
    if domain['preslvl'] is not None:
        pres_idx = np.where(pres == domain['preslvl'])[0][0]
        pres = pres[pres_idx]
    else:
        pres_idx = None
    last = lats_idx[0]
    laen = lats_idx[-1] + 1
    lost = lons_idx[0]
    loen = lons_idx[-1] + 1
    latsnew = lats[lats_idx]
    lonsnew = lons[lons_idx] - 360
    index = {'last': last, 'laen': laen, 'lost': lost, 'loen': loen, 'plvl': pres_idx}
    coords = {'lats': latsnew, 'lons': lonsnew, 'pres': pres}
    return index, coords
Developer: rvalenzuelar | Project: pythonx | Lines: 34 | Source: download_opendap_cfsr.py
Example 9: __init__
def __init__(self, url):
    """Constructor, reads in a file from the url provided

    @param url: OPeNDAP url for the NetCDF file
    """
    self.url = url
    # Open the file using OPeNDAP
    f = open_url(url)
    # Pull out the attributes and the column names
    self.attributes = f.attributes['NC_GLOBAL']
    self.columns = f.keys()
    if 'time' in f:
        # There's a time dimension in this dataset, so grab the
        # possible values out of it
        try:
            time_units = f['time'].attributes['units']
            self.time_values = [parse(t, time_units) for t in f['time']]
        except IndexError:
            # Not a parseable time format
            pass
    del f
Developer: NERC-CEH | Project: jules-jasmin | Lines: 25 | Source: netcdf.py
Example 10: getemolt_sensor
def getemolt_sensor(mindtime1, maxdtime1, i_mindepth, i_maxdepth, site2, mindtime, maxdtime):
    # Select data from "emolt_sensor" according to the given conditions
    url2 = "http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.SITE,emolt_sensor.TIME_LOCAL,emolt_sensor.YRDAY0_LOCAL,emolt_sensor.TEMP,emolt_sensor.DEPTH_I&emolt_sensor.TIME_LOCAL>=" + str(mindtime1) + "&emolt_sensor.TIME_LOCAL<=" \
           + str(maxdtime1) + "&emolt_sensor.DEPTH_I>=" + str(i_mindepth) + "&emolt_sensor.DEPTH_I<=" + str(i_maxdepth) + site2
    try:
        dataset1 = open_url(url2)
    except Exception:
        print('Sorry, ' + url2 + ' not available')
        sys.exit(0)
    emolt_sensor = dataset1['emolt_sensor']
    try:
        sites2 = list(emolt_sensor['SITE'])
    except Exception:
        print("Sorry, according to your input there are no values; please check it!")
        sys.exit(0)
    time = list(emolt_sensor['TIME_LOCAL'])
    yrday0 = list(emolt_sensor['YRDAY0_LOCAL'])
    temp = list(emolt_sensor['TEMP'])
    depth1 = list(emolt_sensor['DEPTH_I'])
    time1, temp1, yrday01, sites1, depth = [], [], [], [], []
    for m in range(len(time)):
        if date2num(mindtime) <= yrday0[m] % 1 + date2num(dt.datetime.strptime(str(time[m]), '%Y-%m-%d')) <= date2num(maxdtime):
            temp1.append(temp[m])
            yrday01.append(yrday0[m] % 1 + date2num(dt.datetime.strptime(str(time[m]), '%Y-%m-%d')))
            sites1.append(sites2[m])
            time1.append(date2num(dt.datetime.strptime(str(time[m]), '%Y-%m-%d')))
            depth.append(depth1[m])
    return time1, yrday01, temp1, sites1, depth
Developer: jian-cui | Project: pyocean | Lines: 35 | Source: hx.py
Example 11: __init__
def __init__(self, day, month, year, lat, lon, ndays):
    self.day = day
    self.month = month
    self.year = year
    self.lat = lat
    self.lon = lon
    self.ndays = ndays
    self.datetime_date = datetime.date(self.year, self.month, self.day)
    path = self.construct_path('http://goldsmr2.sci.gsfc.nasa.gov:80/opendap/MERRA/MAT1NXFLX.5.2.0',
                               'MERRA300.prod.assim.tavg1_2d_flx_Nx.', 0)
    dataset = open_url(path)
    self.native_lat = dataset['YDim'][:]
    self.native_lon = dataset['XDim'][:]
    # Indices of the native grid cell nearest to the requested lat/lon
    self.lat_idx_native = np.where(np.min(np.abs(self.native_lat - lat)) == np.abs(self.native_lat - lat))[0][0]
    self.lon_idx_native = np.where(np.min(np.abs(self.native_lon - lon)) == np.abs(self.native_lon - lon))[0][0]
    self.lat_actual = self.native_lat[self.lat_idx_native]
    self.lon_actual = self.native_lon[self.lon_idx_native]
Developer: pressel | Project: ForcingExtractor | Lines: 28 | Source: merra.py
Example 12: test_Functions
def test_Functions(self):
    dataset = open_url('http://localhost:8001/')
    rain = dataset.rain
    self.assertEqual(rain.rain.shape, (2, 3))

    functions = Functions('http://localhost:8001/')

    dataset = functions.mean(rain, 0)
    self.assertEqual(dataset.rain.rain.shape, (3,))
    np.testing.assert_array_equal(dataset.rain.rain.data,
                                  np.array([1.5, 2.5, 3.5]))
    dataset = functions.mean(rain, 0)
    self.assertEqual(dataset['rain']['rain'].shape, (3,))
    np.testing.assert_array_equal(dataset.rain.rain.data,
                                  np.array([1.5, 2.5, 3.5]))

    dataset = functions.mean(rain, 1)
    self.assertEqual(dataset.rain.rain.shape, (2,))
    np.testing.assert_array_equal(dataset.rain.rain.data,
                                  np.array([1.0, 4.0]))
    dataset = functions.mean(rain, 1)
    self.assertEqual(dataset['rain']['rain'].shape, (2,))
    np.testing.assert_array_equal(dataset.rain.rain.data,
                                  np.array([1.0, 4.0]))

    dataset = functions.mean(functions.mean(rain, 0), 0)
    self.assertEqual(dataset['rain']['rain'].shape, ())
    np.testing.assert_array_equal(dataset.rain.rain.data,
                                  np.array(2.5))
Developer: pacificclimate | Project: pydap-pdp | Lines: 29 | Source: test_client.py
Example 13: opendap_fetch
def opendap_fetch(cls, asset, date):
    """Get an array proxy from OpenDAP for this asset and date"""
    url = cls._assets[asset].get('url', '')
    if url == '':
        raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
    success = False
    for ver in ['100', '200', '300', '301', '400']:
        if asset != "FRLAND":
            f = cls._assets[asset]['source'] % (ver, date.year, date.month, date.day)
            loc = "%s/%04d/%02d/%s" % (url, date.year, date.month, f)
        else:
            f = cls._assets[asset]['source'] % (ver, 0, 0, 0)
            loc = "%s/1980/%s" % (url, f)
        try:
            with Timeout(30):
                dataset = open_url(loc)
        except Timeout.Timeout:
            print("Timeout")
        except Exception:
            pass
        else:
            success = True
            break
Developer: Applied-GeoSolutions | Project: gips | Lines: 25 | Source: merra.py
Example 14: fetch_data
def fetch_data(URL):
    cnt = 0
    while True:
        if cnt > 10:
            break
        try:
            dataset = open_url(URL)
            var = dataset['precipitation']
            lon = np.array(dataset['nlon'])
            lat = np.array(dataset['nlat'])
            ind_LonMin = int(np.argmin(abs(lon - float(xmin))))
            ind_LonMax = int(np.argmin(abs(lon - float(xmax))))
            ind_LatMin = int(np.argmin(abs(lat - float(ymin))))
            ind_LatMax = int(np.argmin(abs(lat - float(ymax))))
            Data = var[ind_LonMin:ind_LonMax, ind_LatMin:ind_LatMax]
            LLX = lon[ind_LonMin]
            LLY = lat[ind_LatMin]
        except Exception:
            cnt = cnt + 1
            print('fetch attempt', cnt)
            continue
        break
    d = np.flipud(Data.T)
    return d, LLX, LLY
Developer: jb0092 | Project: brisflood | Lines: 26 | Source: fetch_TRMM.py
Example 15: get_coors
def get_coors(modelname, lo, la, lonc, latc, lon, lat, siglay, h, depth, startrecord, endrecord):
    if lo > 90:
        [la, lo] = dm2dd(la, lo)
    print('la, lo', la, lo)
    latd, lond = [la], [lo]
    kf, distanceF = nearest_point_index(lo, la, lonc, latc, num=1)  # nearest triangle center F - face
    kv, distanceV = nearest_point_index(lo, la, lon, lat, num=1)    # nearest triangle vertex V - vertex
    kf = kf[0][0]
    kv = kv[0][0]
    print('kf:', kf)
    if h[kv] < 0:
        print('Sorry, your position is on land, please try another point')
        sys.exit()
    depthtotal = siglay[:, kv] * h[kv]
    layer = np.argmin(abs(depthtotal - depth))
    for i in range(startrecord, endrecord):
        # read this time step of the model from the website
        timeurl = '[' + str(i) + ':1:' + str(i) + ']'
        uvposition = str([layer]) + str([kf])
        data_want = ('u' + timeurl + uvposition, 'v' + timeurl + uvposition)
        url = url_with_time_position(modelname, data_want)
        dataset = open_url(url)
        u = np.array(dataset['u'])
        v = np.array(dataset['v'])
        print('u, v, i', u[0, 0, 0], v[0, 0, 0], i)
        # advance the position using the local velocity
        par_u = u[0, 0, 0]
        par_v = v[0, 0, 0]
        xdelta = par_u * 60 * 60
        ydelta = par_v * 60 * 60
        latdelta = ydelta / 111111
        londelta = xdelta / (111111 * np.cos(la * np.pi / 180))
        la = la + latdelta
        lo = lo + londelta
        latd.append(la)
        lond.append(lo)
        kf, distanceF = nearest_point_index(lo, la, lonc, latc, num=1)
        kv, distanceV = nearest_point_index(lo, la, lon, lat, num=1)
        kf, kv = kf[0][0], kv[0][0]
        depthtotal = siglay[:, kv] * h[kv]
        if distanceV >= 0.3:
            if i == startrecord:
                print('Sorry, your start position is NOT in the model domain')
            break
    return latd, lond
Developer: Particles-in-Motion | Project: web_track | Lines: 60 | Source: gettrack-test.py
Example 16: load
def load(url, variable):
    '''Load a Dataset from an OpenDAP URL

    :param url: The OpenDAP URL for the dataset of interest.
    :type url: String
    :param variable: The name of the variable to read from the dataset.
    :type variable: String

    :returns: A Dataset object containing the dataset pointed to by the
        OpenDAP URL.

    :raises: ServerError
    '''
    # Grab the dataset information and pull the appropriate variable
    d = open_url(url)
    dataset = d[variable]

    # Grab the lat, lon, and time variable names.
    # We assume the variable order is (time, lat, lon).
    dataset_dimensions = dataset.dimensions
    time = dataset_dimensions[0]
    lat = dataset_dimensions[1]
    lon = dataset_dimensions[2]

    # Time is given to us in some units since an epoch. We need to convert
    # these values to datetime objects. Note that we use the main object's
    # time object and not the dataset-specific reference to it. We need to
    # grab the 'units' from it, and it fails on the dataset-specific object.
    times = np.array(_convert_times_to_datetime(d[time]))
    lats = np.array(dataset[lat][:])
    lons = np.array(dataset[lon][:])
    values = np.array(dataset[:])

    return Dataset(lats, lons, times, values, variable)
Developer: cgoodale | Project: climate | Lines: 35 | Source: dap.py
Example 17: get_data_from_opendap
def get_data_from_opendap(self, x, y, start=None, end=None):
    """
    Return list of dicts for data at x, y.

    Start, end are datetimes, and default to the first and last
    datetime in the file.
    """
    try:
        dataset = client.open_url(self.url)
    except ServerError:
        return []
    index_start = 0
    if start is not None:
        index_start = self.index(start)
    if end is None:
        index_end = self.timesteps - 1
    else:
        index_end = self.index(end)
    precipitation = dataset['precipitation']['precipitation']
    tuples = zip(
        iter(self._get_datetime_generator(start=index_start, end=index_end)),
        precipitation[y, x, index_start:index_end + 1][0, 0, :],
    )
    return [dict(unit='mm/5min', datetime=d, value=float(p))
            for d, p in tuples
            if p != config.NODATAVALUE]
Developer: jaapschellekens | Project: openradar | Lines: 32 | Source: products.py
Example 18: fetch
def fetch(cls, asset, tile, date):
    """Get this asset for this tile and date (using the OpenDAP service)"""
    url = cls._assets[asset].get('url', '') % (date.year, tile, date.year)
    source = cls._assets[asset]['source']
    loc = "%s/%s" % (url, source)
    print(loc)
    dataset = open_url(loc)
    x0 = dataset['x'].data[0] - 500.0
    y0 = dataset['y'].data[0] + 500.0
    day = date.timetuple().tm_yday
    iday = day - 1
    data = np.array(dataset[asset][iday, :, :]).squeeze().astype('float32')
    ysz, xsz = data.shape
    description = cls._assets[asset]['description']
    meta = {'ASSET': asset, 'TILE': tile, 'DATE': str(date.date()), 'DESCRIPTION': description}
    sday = str(day).zfill(3)
    fout = os.path.join(cls.Repository.path('stage'), "daymet_%s_%s_%4d%s.tif" % (asset, tile, date.year, sday))
    geo = [float(x0), cls._defaultresolution[0], 0.0, float(y0), 0.0, -cls._defaultresolution[1]]
    geo = np.array(geo).astype('double')
    dtype = create_datatype(data.dtype)
    imgout = gippy.GeoImage(fout, xsz, ysz, 1, dtype)
    imgout.SetBandName(asset, 1)
    imgout.SetNoData(-9999.)
    imgout.SetProjection(PROJ)
    imgout.SetAffine(geo)
    imgout[0].Write(data)
Developer: Applied-GeoSolutions | Project: gips | Lines: 26 | Source: daymet.py
Example 19: get_last_forecast
def get_last_forecast(self, url):
    now = datetime.datetime.now(pytz.utc)
    # it takes 5.5 hours before data arrive on the GrADS site
    last = now + datetime.timedelta(hours=-6)
    for _ in range(6):
        # forecasts are issued at h = 0, 6 and 12
        h = min(int(last.hour / 6), 2) * 6
        last = last.replace(hour=h)
        hour = '%02d' % last.hour
        date = last.strftime('%Y%m%d')
        full_url = url.format(date=date, hour=hour)
        try:
            logger.debug('querying ' + full_url)
            dataset = open_url(full_url)
            logger.debug('Forecast found for date={date}, hour={hour}'.format(date=date, hour=hour))
            return dataset
        except Exception as e:
            logger.warning('Forecast not found: %s' % e)
            # try the previous forecast
            last = last + datetime.timedelta(hours=-6)
    logger.error('No GEFS forecast found')
    return None
Developer: acaciawater | Project: acaciadata | Lines: 26 | Source: noaa.py
Example 20: test_lazy_evaluation_getattr
def test_lazy_evaluation_getattr(ssf_app):
    """Test that the dataset is only loaded when accessed."""
    original = open_url('/', application=ssf_app)
    dataset = original.functions.mean(original.SimpleGrid, 0)
    assert dataset.dataset is None
    dataset.SimpleGrid
    assert dataset.dataset is not None
Developer: jblarsen | Project: pydap | Lines: 7 | Source: test_client.py
Note: the pydap.client.open_url examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors, and any redistribution or reuse must follow the corresponding project's License. Please do not reproduce without permission.