Merge pull request #78 from emiliom/hads
hads collector update to support unverified SSL certs
daf authored Mar 30, 2017
2 parents e3f8479 + bf2a162 commit 29fabd9
Showing 1 changed file with 22 additions and 10 deletions.
pyoos/collectors/hads/hads.py: 32 changes (22 additions & 10 deletions)
@@ -84,23 +84,23 @@ def list_features(self):
 
         return station_codes
 
-    def collect(self):
+    def collect(self, **kwargs):
         var_filter = None
         if hasattr(self, '_variables'):
             var_filter = self._variables
 
         time_extents = (self.start_time if hasattr(self, 'start_time') else None, self.end_time if hasattr(self, 'end_time') else None)
 
-        metadata, raw_data = self.raw()
+        metadata, raw_data = self.raw(**kwargs)
         return self.parser.parse(metadata, raw_data, var_filter, time_extents)
 
-    def raw(self, format=None):
+    def raw(self, format=None, **kwargs):
        """
        Returns a tuple of (metadata, raw data)
        """
         station_codes = self._apply_features_filter(self._get_station_codes())
-        metadata = self._get_metadata(station_codes)
-        raw_data = self._get_raw_data(station_codes)
+        metadata = self._get_metadata(station_codes, **kwargs)
+        raw_data = self._get_raw_data(station_codes, **kwargs)
 
         return (metadata, raw_data)
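
This hunk threads keyword arguments from collect() through raw() into the two HTTP helpers, so a caller can pass verify=False down to the requests calls and accept the HADS server's unverified SSL certificate. A minimal usage sketch, assuming the collector class exported by this module is named Hads and that no feature or variable filters are set:

    # Minimal usage sketch; assumes the collector class in this module is
    # named Hads and that no feature/variable filters have been applied.
    from pyoos.collectors.hads.hads import Hads

    collector = Hads()

    # verify=False is forwarded via **kwargs through raw() to the
    # requests.post(..., verify=False) calls, skipping SSL certificate
    # verification for the HADS metadata and observation requests.
    stations = collector.collect(verify=False)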

@@ -116,12 +116,18 @@ def _apply_features_filter(self, station_codes):
 
         return station_codes
 
-    def _get_metadata(self, station_codes):
+    def _get_metadata(self, station_codes, **kwargs):
+        if 'verify' in kwargs:
+            verify_cert = kwargs['verify']
+        else:
+            verify_cert = True # the default for requests
+
         resp = requests.post(self.metadata_url, data={'state' : 'nil',
                                                       'hsa' : 'nil',
                                                       'of' : '1',
                                                       'extraids' : " ".join(station_codes),
-                                                      'data' : "Get Meta Data"})
+                                                      'data' : "Get Meta Data"},
+                             verify=verify_cert)
         resp.raise_for_status()
         return resp.text
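
The verify keyword is looked up by hand above; requests.post() accepts verify directly, and it may be True, False, or a path to a CA bundle. A standalone sketch of the same default-to-True pattern, with a placeholder URL and payload rather than the real HADS endpoint:

    import requests

    def post_form(url, payload, **kwargs):
        # Same pattern as _get_metadata() above: verify certificates unless
        # the caller explicitly passed verify=...; kwargs.get is equivalent
        # to the if/else block in the diff.
        verify_cert = kwargs.get('verify', True)
        resp = requests.post(url, data=payload, verify=verify_cert)
        resp.raise_for_status()
        return resp.text

    # Placeholder URL and form fields, for illustration only.
    text = post_form("https://example.com/metadata", {"of": "1"}, verify=False)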

@@ -155,7 +161,7 @@ def _get_station_codes(self, force=False):
             self.station_codes.extend(self._get_stations_for_state(state_url))
 
         if self.bbox:
-            # retreive metadata for all stations to properly filter them
+            # retrieve metadata for all stations to properly filter them
             metadata = self._get_metadata(self.station_codes)
             parsed_metadata = self.parser._parse_metadata(metadata)
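
For context, when a bounding-box filter is set the collector first downloads metadata for every candidate station and parses it before filtering. A generic sketch of that kind of spatial filter, not pyoos's actual implementation (the bbox ordering and the per-station 'longitude'/'latitude' keys are assumptions):

    def filter_stations_by_bbox(parsed_metadata, bbox):
        # bbox assumed as (min_lon, min_lat, max_lon, max_lat); the
        # 'longitude'/'latitude' keys per station are illustrative only.
        min_lon, min_lat, max_lon, max_lat = bbox
        return [code for code, meta in parsed_metadata.items()
                if min_lon <= meta['longitude'] <= max_lon
                and min_lat <= meta['latitude'] <= max_lat]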

@@ -178,7 +184,12 @@ def _get_stations_for_state(self, state_url):
         state_root = BeautifulSoup(requests.get(state_url).text)
         return [x for x in [x.attrs['href'].split("nesdis_id=")[-1] for x in state_root.find_all('a')] if len(x) > 0]
 
-    def _get_raw_data(self, station_codes):
+    def _get_raw_data(self, station_codes, **kwargs):
+        if 'verify' in kwargs:
+            verify_cert = kwargs['verify']
+        else:
+            verify_cert = True # the default for requests
+
         since = 7
         if hasattr(self, 'start_time') and self.start_time is not None:
             # calc delta between now and start_time
@@ -196,7 +207,8 @@ def _get_raw_data(self, station_codes):
                                                   'hsa' : 'nil',
                                                   'of' : '1',
                                                   'extraids' : " ".join(station_codes),
-                                                  'sinceday' : since})
+                                                  'sinceday' : since},
+                             verify=verify_cert)
         resp.raise_for_status()
 
         return resp.text
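
One practical note when passing verify=False: urllib3 emits an InsecureRequestWarning for every unverified HTTPS request. Callers who disable verification deliberately sometimes silence that warning, for example:

    import urllib3

    # Silence the warning urllib3 raises for requests made with verify=False.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)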
