本文整理汇总了Python中pyiso.LOGGER类的典型用法代码示例。如果您正苦于以下问题:Python LOGGER类的具体用法?Python LOGGER怎么用?Python LOGGER使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了LOGGER类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: latest_fuel_mix
def latest_fuel_mix(self):
    """Fetch the current MISO fuel mix CSV and return a DataFrame with a
    UTC 'timestamp' index and 'fuel_name'/'gen_MW' columns.

    Returns an empty DataFrame when the request fails or the source
    serves an error page instead of CSV.
    """
    response = self.request(self.base_url + '/ria/FuelMix.aspx?CSV=True')
    if not response:
        return pd.DataFrame()
    # the endpoint serves an HTML error page instead of CSV on failure
    if 'The page cannot be displayed' in response.text:
        LOGGER.error('MISO: Error in source data for generation')
        return pd.DataFrame()
    # parse CSV, then convert the local-time index to UTC
    mix_df = pd.read_csv(BytesIO(response.content), header=0, index_col=0, parse_dates=True)
    mix_df.index = self.utcify_index(mix_df.index)
    mix_df.index.set_names(['timestamp'], inplace=True)
    # map source category codes to canonical fuel names
    mix_df['fuel_name'] = mix_df.apply(lambda row: self.fuels[row['CATEGORY']], axis=1)
    mix_df['gen_MW'] = mix_df['ACT']
    return mix_df[['fuel_name', 'gen_MW']]
开发者ID:avances123,项目名称:pyiso,代码行数:27,代码来源:miso.py
示例2: handle_options
def handle_options(self, **kwargs):
    """
    Process and store keyword argument options, filling in defaults for
    'market', 'freq', and 'yesterday' when the caller did not supply them.

    :raises ValueError: If no balancing authority has been set on the client.
    """
    super(EIAClient, self).handle_options(**kwargs)
    if not hasattr(self, 'BA'):
        LOGGER.error('Balancing authority not set.')
        raise ValueError('Balancing authority not set.')
    if 'market' not in self.options:
        # day-ahead market for forecasts and sliceable generation; hourly otherwise
        if self.options['forecast'] or (self.options['sliceable'] and self.options['data'] == 'gen'):
            self.options['market'] = self.MARKET_CHOICES.dam
        else:
            self.options['market'] = self.MARKET_CHOICES.hourly
    # every branch of the frequency decision resolves to hourly
    if 'freq' not in self.options:
        self.options['freq'] = self.FREQUENCY_CHOICES.hourly
    if 'yesterday' not in self.options:
        self.options['yesterday'] = False
开发者ID:WattTime,项目名称:pyiso,代码行数:26,代码来源:eia_esod.py
示例3: fetch_forecast
def fetch_forecast(self, date):
    """Download the MISO day-ahead forecast spreadsheet for *date* and
    return it as a DataFrame indexed by UTC timestamp.

    Returns an empty DataFrame when the request fails or no data exists.
    """
    datestr = date.strftime('%Y%m%d')
    url = self.base_url + '/Library/Repository/Market%20Reports/' + datestr + '_da_ex.xls'
    # use self.request for easier debugging and mocking
    response = self.request(url)
    if not response:
        return pd.DataFrame()
    if response.status_code == 404:
        LOGGER.debug('No MISO forecast data available at %s' % datestr)
        return pd.DataFrame()
    xls = pd.read_excel(BytesIO(response.content))
    # the first 5 rows are header cruft; the last of them names the columns
    header_df = xls.iloc[:5]
    df = xls.iloc[5:]
    df.columns = ['hour_str'] + list(header_df.iloc[-1][1:])
    # rows are labeled 'Hour 01'..'Hour 24'; build a UTC index from them
    timestamps = []
    for hour_str in df['hour_str']:
        local_ts = datetime(date.year, date.month, date.day, int(hour_str[5:]) - 1)
        timestamps.append(self.utcify(local_ts))
    df.index = timestamps
    df.index.set_names(['timestamp'], inplace=True)
    return df
开发者ID:avances123,项目名称:pyiso,代码行数:32,代码来源:miso.py
示例4: _dst_active_hours_for_transition_day
def _dst_active_hours_for_transition_day(self, local_dt_index):
"""
When attempting to localize a timezone-naive list of dates, the daylight savings status may be ambigous. This
method is meant as a fallback when the ambiguous='infer' datetime handling in pandas fails. It assumes
that the datetime index is a daylight saving transition day.
:param pandas.DatetimeIndex local_dt_index: A list of timezone-naive DatetimeIndex values.
:return: A list of bool values indicating whether daylight savings time is active for the list provided.
This returned list of boolean value is useful for passing to pandas 'ambiguous' kwarg.
:rtype: list
"""
dst_active_list = []
hour_idx = local_dt_index.hour
if len(hour_idx) > 3:
starting_timestamp = local_dt_index[0]
starting_month = starting_timestamp.month
starting_hour = starting_timestamp.hour
if starting_month == 3 and starting_hour == 0:
dst_active_list = [h > 1 for h in hour_idx]
elif starting_month == 11 and starting_hour == 0:
dst_active_list = [h < 2 for h in hour_idx]
elif 3 < starting_month < 11:
dst_active_list = [True for h in hour_idx]
elif starting_month < 3 or starting_month > 11:
dst_active_list = [False for h in hour_idx]
else:
LOGGER.warn("Uanble to infer fallback DST status for ambiguous DatetimeIndex values.")
return dst_active_list
开发者ID:WattTime,项目名称:pyiso,代码行数:29,代码来源:base.py
示例5: get_load
def get_load(self, latest=False, start_at=False, end_at=False,
             forecast=False, **kwargs):
    """Collect ISO-NE load data for the requested options and return it
    as a serialized list of dicts (empty list on parse failure)."""
    self.handle_options(data='load', latest=latest, forecast=forecast,
                        start_at=start_at, end_at=end_at, **kwargs)
    # gather raw points across every endpoint, skipping unparseable payloads
    raw_data = []
    for endpoint in self.request_endpoints():
        payload = self.fetch_data(endpoint, self.auth)
        try:
            raw_data += self.parse_json_load_data(payload)
        except ValueError as e:
            LOGGER.warn(e)
    # assemble, slice to the requested window, and serialize
    try:
        df = self._parse_json(raw_data)
    except ValueError:
        return []
    sliced = self.slice_times(df)
    return self.serialize_faster(sliced, drop_index=True)
开发者ID:cnblevins,项目名称:pyiso,代码行数:30,代码来源:isone.py
示例6: get_lmp
def get_lmp(self, node_id='INTERNALHUB', latest=True, start_at=False, end_at=False, **kwargs):
    """
    Return locational marginal price data for *node_id* as a list of dicts.

    :param str node_id: Location name; must be a key of self.locations.
    :raises ValueError: If no LMP data is available for the location.
    """
    self.handle_options(data='lmp', latest=latest,
                        start_at=start_at, end_at=end_at, node_id=node_id, **kwargs)
    # map the node name to the source's numeric location id
    try:
        locationid = self.locations[node_id.upper()]
    except KeyError:
        raise ValueError('No LMP data available for location %s' % node_id)
    # collect raw data across all endpoints, skipping unparseable payloads
    raw_data = []
    for endpoint in self.request_endpoints(locationid):
        data = self.fetch_data(endpoint, self.auth)
        try:
            raw_data += self.parse_json_lmp_data(data)
        except ValueError as e:
            LOGGER.warn(e)
            continue
    # parse and slice
    df = self._parse_json(raw_data)
    df = self.slice_times(df)
    # 'records' is the full orient name; the abbreviation 'record' relied on
    # prefix matching that pandas deprecated and later removed
    return df.to_dict(orient='records')
开发者ID:cnblevins,项目名称:pyiso,代码行数:30,代码来源:isone.py
示例7: fetch_forecast
def fetch_forecast(self, date):
    """Download the MISO day-ahead forecast spreadsheet for *date* and
    return it as a DataFrame indexed by UTC timestamp; empty on 404."""
    datestr = date.strftime("%Y%m%d")
    url = self.base_url + "/Library/Repository/Market%20Reports/" + datestr + "_da_ex.xls"
    try:
        xls = pd.read_excel(url)
    except HTTPError:
        LOGGER.debug("No MISO forecast data available at %s" % datestr)
        return pd.DataFrame()
    # the first 5 rows are header cruft; the last of them names the columns
    header_df = xls.iloc[:5]
    df = xls.iloc[5:]
    df.columns = ["hour_str"] + list(header_df.iloc[-1][1:])
    # rows are labeled "Hour 01".."Hour 24"; build a UTC index from them
    idx = [
        self.utcify(datetime(date.year, date.month, date.day, int(hour_str[5:]) - 1))
        for hour_str in df["hour_str"]
    ]
    df.index = idx
    df.index.set_names(["timestamp"], inplace=True)
    return df
开发者ID:simon71717,项目名称:pyiso,代码行数:29,代码来源:miso.py
示例8: request
def request(self, *args, **kwargs):
    """Delegate to the parent client's request, logging and swallowing
    HTTP 400 (Bad Request) responses by returning None."""
    response = super(PJMClient, self).request(*args, **kwargs)
    # keep the truthiness check: a requests.Response is falsy for error statuses
    if not response:
        return response
    if response.status_code == 400:
        LOGGER.warn('PJM request returned Bad Request %s' % response)
        return None
    return response
开发者ID:cnblevins,项目名称:pyiso,代码行数:7,代码来源:pjm.py
示例9: get_trade
def get_trade(self, latest=False,
              start_at=False, end_at=False, **kwargs):
    """Collect NVEnergy trade data over the requested dates and return
    the time-subset list of parsed dicts."""
    self.handle_options(data='trade', latest=latest,
                        start_at=start_at, end_at=end_at, **kwargs)
    parsed_data = []
    for this_date in self.dates():
        # fetch the day's spreadsheet; skip days with no data
        try:
            df, mode = self.fetch_df(this_date)
        except (HTTPError, ValueError):
            LOGGER.warn('No data available in NVEnergy at %s' % this_date)
            continue
        # parse; skip days whose layout we cannot interpret
        try:
            parsed_data += self.parse_trade(df, this_date, mode)
        except KeyError:
            LOGGER.warn('Unparseable data available in NVEnergy at %s: %s' % (this_date, df))
    return self.time_subset(parsed_data)
开发者ID:WattTime,项目名称:pyiso,代码行数:27,代码来源:nvenergy.py
示例10: latest_fuel_mix
def latest_fuel_mix(self):
    """Fetch the current MISO fuel mix and return a DataFrame with a UTC
    'timestamp' index and 'fuel_name'/'gen_MW' columns; empty on error."""
    response = self.request(self.base_url + "/ria/FuelMix.aspx?CSV=True")
    if not response:
        return pd.DataFrame()
    # the endpoint serves an HTML error page instead of CSV on failure
    if "The page cannot be displayed" in response.text:
        LOGGER.error("MISO: Error in source data for generation")
        return pd.DataFrame()
    # parse CSV, then convert the local-time index to UTC
    frame = pd.read_csv(StringIO(response.text), header=0, index_col=0, parse_dates=True)
    frame.index = self.utcify_index(frame.index)
    frame.index.set_names(["timestamp"], inplace=True)
    # map source category codes to canonical fuel names
    frame["fuel_name"] = frame.apply(lambda row: self.fuels[row["CATEGORY"]], axis=1)
    frame["gen_MW"] = frame["ACT"]
    return frame[["fuel_name", "gen_MW"]]
开发者ID:simon71717,项目名称:pyiso,代码行数:27,代码来源:miso.py
示例11: utcify_index
def utcify_index(self, local_index, tz_name=None):
    """
    Convert a DatetimeIndex to UTC.

    :param DatetimeIndex local_index: The local DatetimeIndex to be converted.
    :param string tz_name: If local_index is naive, it is assumed to be in timezone tz_name.
        If tz_name is not provided, the client's default timezone is used.
    :return: DatetimeIndex in UTC.
    :rtype: DatetimeIndex
    """
    # default to the client's timezone
    if tz_name is None:
        tz_name = self.TZ_NAME
    # localize, falling back to inference for DST-ambiguous timestamps
    try:
        aware_local_index = local_index.tz_localize(tz_name)
    except AmbiguousTimeError as e:
        LOGGER.debug(e)
        aware_local_index = local_index.tz_localize(tz_name, ambiguous='infer')
    # removed commented-out dead code that handled already-aware indexes
    return aware_local_index.tz_convert('UTC')
开发者ID:simon71717,项目名称:pyiso,代码行数:30,代码来源:base.py
示例12: fetch_csvs
def fetch_csvs(self, date, label):
    """Fetch NYISO CSV text for *date* and *label*.

    Tries the daily file first, then falls back to the zipped monthly
    archive. Returns a list of CSV strings (possibly empty).
    """
    # try the daily file
    datestr = date.strftime('%Y%m%d')
    if self.options['data'] == 'lmp':
        url = '%s/%s/%s%s_zone.csv' % (self.base_url, label, datestr, label)
    else:
        url = '%s/%s/%s%s.csv' % (self.base_url, label, datestr, label)
    response = self.request(url)
    if response and response.status_code == 200:
        return [response.text]
    # fall back to the zipped monthly archive
    datestr = date.strftime('%Y%m01')
    if self.options['data'] == 'lmp':
        url = '%s/%s/%s%s_zone_csv.zip' % (self.base_url, label, datestr, label)
    else:
        url = '%s/%s/%s%s_csv.zip' % (self.base_url, label, datestr, label)
    response_zipped = self.request(url)
    if not response_zipped:
        return []
    unzipped = self.unzip(response_zipped.content)
    if not unzipped:
        return []
    LOGGER.info('Failed to find daily %s data for %s but found monthly data, using that' % (self.options['data'], date))
    return unzipped
开发者ID:cnblevins,项目名称:pyiso,代码行数:35,代码来源:nyiso.py
示例13: fetch_todays_outlook_renewables
def fetch_todays_outlook_renewables(self):
    """Return the parsed CAISO today's-outlook renewables page as soup,
    or None when no response was received."""
    page = self.request(self.base_url_outlook + 'renewables.html')
    try:
        return BeautifulSoup(page.content)
    except AttributeError:
        # request() returned no response object, so there is no .content
        LOGGER.warn('No response for CAISO today outlook renewables')
        return None
开发者ID:avances123,项目名称:pyiso,代码行数:8,代码来源:caiso.py
示例14: get_load
def get_load(self, latest=False, start_at=None, end_at=None, forecast=False, **kwargs):
    """Return PJM load data: the day-ahead forecast, historical hourly
    load, or the latest real-time reading, depending on options."""
    self.handle_options(data='load', latest=latest,
                        start_at=start_at, end_at=end_at, forecast=forecast,
                        **kwargs)
    # shared serialization extras for the two hourly/day-ahead branches
    hourly_extras = {
        'freq': self.FREQUENCY_CHOICES.hourly,
        'market': self.MARKET_CHOICES.dam,
        'ba_name': self.NAME,
    }
    if self.options['forecast']:
        # forecast comes from eData
        df = self.fetch_edata_series('ForecastedLoadHistory', {'name': 'PJM RTO Total'})
        sliced = self.slice_times(df)
        sliced.columns = ['load_MW']
        return self.serialize_faster(sliced, extras=hourly_extras)
    if self.options['end_at'] and self.options['end_at'] < datetime.now(pytz.utc) - timedelta(hours=1):
        # window ends more than an hour ago: use the historical archive
        df = self.fetch_historical_load(self.options['start_at'].year)
        sliced = self.slice_times(df)
        return self.serialize_faster(sliced, extras=hourly_extras)
    # real-time: try eData first, then fall back to OASIS
    load_ts, load_val = self.fetch_edata_point('InstantaneousLoad', 'PJM RTO Total', 'MW')
    if not (load_ts and load_val):
        load_ts, load_val = self.fetch_oasis_data()
    if not (load_ts and load_val):
        LOGGER.warn('No PJM latest load data')
        return []
    return [{
        'timestamp': load_ts,
        'freq': self.FREQUENCY_CHOICES.fivemin,
        'market': self.MARKET_CHOICES.fivemin,
        'load_MW': load_val,
        'ba_name': self.NAME,
    }]
开发者ID:cnblevins,项目名称:pyiso,代码行数:57,代码来源:pjm.py
示例15: time_from_soup
def time_from_soup(self, soup):
    """
    Returns a UTC timestamp if one is found in the soup,
    or None if an error was encountered.
    """
    ts_elt = soup.find(class_='ts')
    if ts_elt:
        return self.utcify(ts_elt.string)
    LOGGER.error('PJM: Timestamp not found in soup:\n%s' % soup)
    return None
开发者ID:simon71717,项目名称:pyiso,代码行数:10,代码来源:pjm.py
示例16: fetch_oasis
def fetch_oasis(self, payload=None, return_all_files=False):
    """
    Returns a list of report data elements, or an empty list if an error was encountered.

    If return_all_files=False, returns only the content from the first file in the .zip -
    this is the default behavior and was used in earlier versions of this function.

    If return_all_files=True, will return an array representing the content from each file.
    This is useful for processing LMP data or other fields where multiple price components are returned in a zip.

    :param dict payload: Query parameters for the OASIS request (default: none).
    :param bool return_all_files: Whether to return content from every file in the zip.
    """
    # avoid the shared-mutable-default-argument pitfall
    if payload is None:
        payload = {}
    # empty result matching the requested return shape
    if return_all_files is True:
        default_return_val = []
    else:
        default_return_val = ''
    # try get
    response = self.request(self.base_url_oasis, params=payload)
    if not response:
        return default_return_val
    # read data from zip
    # This will be an array of content if successful, and None if unsuccessful
    content = self.unzip(response.content)
    if not content:
        return default_return_val
    # check xml content for errors
    soup = BeautifulSoup(content[0], 'lxml')
    error = soup.find('m:error')
    if error:
        code = error.find('m:err_code')
        desc = error.find('m:err_desc')
        msg = 'XML error for CAISO OASIS with payload %s: %s %s' % (payload, code, desc)
        LOGGER.error(msg)
        return default_return_val
    # return xml or csv data
    if payload.get('resultformat', False) == 6:
        # If we requested CSV files
        if return_all_files:
            return content
        else:
            return content[0]
    else:
        # Return XML content
        if return_all_files:
            return [BeautifulSoup(thisfile).find_all('report_data') for thisfile in content]
        else:
            return soup.find_all('report_data')
开发者ID:ecalifornica,项目名称:pyiso,代码行数:54,代码来源:caiso.py
示例17: handle_ba_limitations
def handle_ba_limitations(self):
    """Handle BA limitations.

    Raises ValueError when the configured balancing authority cannot serve
    the requested data (two-day reporting delay, no load support, or a
    Canadian/Mexican BA).
    """
    today = pytz.utc.localize(datetime.utcnow()).astimezone(pytz.timezone(self.TZ_NAME))
    two_days_ago = today - timedelta(days=2)
    load_not_supported_bas = ['DEAA', 'EEI', 'GRIF', 'GRMA', 'GWA',
                              'HGMA', 'SEPA', 'WWA', 'YAD']
    delay_bas = ['AEC', 'DOPD', 'GVL', 'HST', 'NSB', 'PGE', 'SCL',
                 'TAL', 'TIDC', 'TPWR']
    canada_mexico = ['IESO', 'BCTC', 'MHEB', 'AESO', 'HQT', 'NBSO',
                     'CFE', 'SPC']
    if self.BA in delay_bas:
        # data for these BAs lags by two days, so recent windows are unavailable
        recent_end = self.options['end_at'] and self.options['end_at'] > two_days_ago
        if recent_end or self.options['yesterday']:
            LOGGER.error('No data for %s due to 2 day delay' % self.BA)
            raise ValueError('No data: 2 day delay for this BA.')
        elif self.options['forecast']:
            raise ValueError('No data: 2 day delay for this BA.')
    if self.BA in load_not_supported_bas and self.options['data'] == 'load':
        LOGGER.error('Load data not supported for %s' % self.BA)
        raise ValueError('Load data not supported for this BA.')
    if self.BA in canada_mexico:
        LOGGER.error('Data not supported for %s' % self.BA)
        raise ValueError('Data not currently supported for Canada and Mexico')
开发者ID:WattTime,项目名称:pyiso,代码行数:27,代码来源:eia_esod.py
示例18: _assert_entries_1hr_apart
def _assert_entries_1hr_apart(self, result_ts):
    """Assert each consecutive pair of entries is exactly 3600 seconds apart,
    logging both timestamps in local time when a gap exceeds one hour."""
    previous = None
    for entry in result_ts:
        if previous:
            gap_seconds = (entry['timestamp'] - previous['timestamp']).total_seconds()
            if gap_seconds > 3600:
                # log the offending pair in the client's local timezone
                local_tz = pytz.timezone(self.nbpower_client.TZ_NAME)
                LOGGER.error('prev_entry timestamp: ' + str(previous['timestamp'].astimezone(local_tz)))
                LOGGER.error('entry timestamp: ' + str(entry['timestamp'].astimezone(local_tz)))
            self.assertEqual(3600, gap_seconds)
        previous = entry
开发者ID:WattTime,项目名称:pyiso,代码行数:14,代码来源:integration_test_nbpower.py
示例19: get_lmp
def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, **kwargs):
    """Return LMP data for *node_id* as a list of dicts, restricted to
    the requested window when the query is sliceable.

    :raises ValueError: If no LMP data is available for the location.
    """
    self.handle_options(data='lmp', latest=latest,
                        start_at=start_at, end_at=end_at, **kwargs)
    # map the node name to the source's numeric location id
    try:
        locationid = self.locations[node_id.upper()]
    except KeyError:
        raise ValueError('No LMP data available for location %s' % node_id)
    # collect raw data across all endpoints, skipping unparseable payloads
    raw_data = []
    for endpoint in self.request_endpoints(locationid):
        data = self.fetch_data(endpoint, self.auth)
        try:
            raw_data += self.parse_json_lmp_data(data)
        except ValueError as e:
            LOGGER.warn(e)
    # convert each raw point to the canonical dict shape
    parsed_data = []
    for raw_dp in raw_data:
        parsed_dp = {
            'timestamp': self.utcify(raw_dp['BeginDate']),
            'lmp': raw_dp['LmpTotal'],
            'ba_name': self.NAME,
            'market': self.options['market'],
            'freq': self.options['frequency'],
            'node_id': node_id,
            'lmp_type': 'energy',
        }
        # when sliceable, keep only points inside the requested window
        if self.options['sliceable']:
            ts = parsed_dp['timestamp']
            if self.options['start_at'] > ts or self.options['end_at'] < ts:
                continue
        parsed_data.append(parsed_dp)
    return parsed_data
开发者ID:ecalifornica,项目名称:pyiso,代码行数:50,代码来源:isone.py
示例20: get_load
def get_load(self, latest=False, yesterday=False, start_at=False, end_at=False, **kwargs):
    """Return AESO load data: the latest report when latest=True, or a
    date-range query clamped to the supported historical window."""
    super(AESOClient, self).handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at,
                                           **kwargs)
    if latest:
        return self._get_latest_report(request_type=ParserFormat.load)
    if self.options.get('start_at', None) and self.options.get('end_at', None):
        # clamp the requested range to [2000-01-01, end of local today]
        earliest_load_dt = self.mtn_tz.localize(datetime(year=2000, month=1, day=1, hour=0, minute=0, second=0))
        latest_load_dt = self.local_now().replace(hour=23, minute=59, second=59, microsecond=999999)
        range_start = max(self.options['start_at'], earliest_load_dt).astimezone(self.mtn_tz)
        range_end = min(self.options['end_at'], latest_load_dt).astimezone(self.mtn_tz)
        return self._get_load_for_date_range(start_at=range_start, end_at=range_end)
    LOGGER.warn('No valid options were supplied.')
开发者ID:WattTime,项目名称:pyiso,代码行数:14,代码来源:aeso.py
注:本文中的pyiso.LOGGER类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论