diff --git a/config/haste_storage_client_config.json b/config/haste_storage_client_config.json
index 5d14421..902f1b3 100644
--- a/config/haste_storage_client_config.json
+++ b/config/haste_storage_client_config.json
@@ -9,5 +9,8 @@
         "user_domain_name": "xxxx",
         "auth_url": "xxxxx",
         "project_domain_name": "xxxx"
+    },
+    "local_storage": {
+        "folder": "/ubuntu/haste_storage"
     }
 }
diff --git a/examples/timeseries.py b/examples/timeseries.py
new file mode 100644
index 0000000..72bf42c
--- /dev/null
+++ b/examples/timeseries.py
@@ -0,0 +1,44 @@
+""" Example of usage, high-level API. """
+
+import haste
+from haste.hastestorageclient import HasteStorageClient
+
+import numpy
+
+
+class HasteCollection:
+    """ Collection of tuples. """
+
+
+class Experiment(HasteCollection):
+    pass
+
+
+class TimeSeries:
+    """ Encodes a time series object. """
+
+    def __init__(self):
+        pass
+
+
+# The storage client will be used to handle data.
+sc = HasteStorageClient()
+
+# Create an experiment.
+# Its name should probably be unique, and map directly to the "root stream id".
+E = haste.Experiment(storage_client=sc)
+
+# Add a time series to the experiment.
+ts = haste.TimeSeries()
+
+# By adding the ts to the experiment, it will be assigned a unique substream-id,
+# linked somehow to the root stream id of the experiment.
+E.add(ts)
+
+# Now add (time, spatial_data_frame) tuples to the time series.
+# They should be handled automatically by the storage client (i.e. passed through
+# feature extraction, classification and policy-evaluation).
+tspan = numpy.linspace(0, 10.0, 100)
+for t in tspan:
+    data = 100 * ["Large spatial dataframe goes here."]
+    ts.append((t, data))
diff --git a/haste_storage_client/storage.py b/haste_storage_client/storage.py
index d22bf6d..a7f4657 100644
--- a/haste_storage_client/storage.py
+++ b/haste_storage_client/storage.py
@@ -18,6 +18,15 @@
     def close(self):
 
 # The auth token expires after 24 hours by default, but refresh more frequently:
 OS_SWIFT_CONN_MAX_LIFETIME_SECONDS = 60 * 60
 
+class LocalStorage(Storage):
+
+    def __init__(self, config):
+        self.config = config
+
+    def save_blob(self, blob_bytes, blob_id):
+        with open("{0}/{1}".format(self.config["folder"], blob_id), "wb") as fh:
+            fh.write(blob_bytes)
+
 class OsSwiftStorage(Storage):
diff --git a/readme.md b/readme.md
index 61ac720..82e50ea 100644
--- a/readme.md
+++ b/readme.md
@@ -5,7 +5,7 @@ For now, this simply calls the MongoDB and Swift Container clients. Python 3.x.
 ## Installation
 For installation in [development mode](https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode):
 ```
-git clone https://github.com/benblamey/HasteStorageClient.git
+git clone https://github.com/HASTE-project/HasteStorageClient.git
 cd HasteStorageClient
 pip3 install -e .
 ```
@@ -27,3 +27,7 @@ instead of specifying config in constructor.
 ### Note
 It isn't possible to connect to the database server from outside the SNIC cloud, so for local dev/testing you'll need to use port forwarding from another machine.
 https://help.ubuntu.com/community/SSH/OpenSSH/PortForwarding
+
+
+## Contributors
+Ben Blamey, Andreas Hellander
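
For reference, a minimal sketch of how the new local-storage backend could be exercised against the `local_storage` section of the config file above. It assumes `LocalStorage` is importable from `haste_storage_client.storage` as added in this diff, that the configured folder already exists, and that the caller passes the `local_storage` sub-dictionary directly; the blob contents and blob id below are placeholders.

```python
import json

from haste_storage_client.storage import LocalStorage

# Load the client configuration (path and structure as in the config file above).
with open('config/haste_storage_client_config.json') as fh:
    config = json.load(fh)

# LocalStorage takes the "local_storage" sub-dictionary, which names the target folder.
storage = LocalStorage(config['local_storage'])

# save_blob(blob_bytes, blob_id) writes the raw bytes to <folder>/<blob_id>.
storage.save_blob(b'example blob contents', 'example_blob_0001')
```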