4 changes: 2 additions & 2 deletions .github/workflows/lint.yml
@@ -19,9 +19,9 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
-python-version: 3.9
+python-version: 3.12
- name: Install Linters
run: |
-pip install black isort -c stormpiper/requirements_test.txt
+pip install -r stormpiper/requirements_test.txt
- name: Run Linters
run: make lint
31 changes: 16 additions & 15 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,28 +1,28 @@
-FROM redis:6.2.12-alpine3.18 as redis
+FROM redis:6.2.17-alpine3.21 AS redis
COPY ./stormpiper/redis.conf /redis.conf
CMD ["redis-server", "/redis.conf"]


-FROM postgis/postgis:14-3.3 as postgis
+FROM postgis/postgis:14-3.5 AS postgis


-FROM node:18.16.0-bullseye as build-frontend
+FROM node:24.10-trixie AS build-frontend
WORKDIR /app
COPY ./stormpiper/stormpiper/spa/package*.json /app/
RUN npm install
COPY ./stormpiper/stormpiper/spa /app/
RUN npm run build


-FROM python:3.11.4-slim-bullseye as core-runtime
+FROM python:3.11.14-slim-trixie AS core-runtime
RUN apt-get update -y \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /stormpiper
ENV PYTHONPATH=/stormpiper
ENV PATH=/opt/venv/bin:$PATH


-FROM core-runtime as base-app
+FROM core-runtime AS base-app
COPY ./stormpiper/scripts /
COPY ./stormpiper/alembic.ini /stormpiper/alembic.ini
COPY ./stormpiper/prestart.sh /stormpiper/prestart.sh
@@ -32,13 +32,14 @@ COPY --from=build-frontend /app/build /stormpiper/stormpiper/spa/build
RUN chmod +x /start.sh /start-pod.sh /start-reload.sh /start-test-container.sh


-FROM python:3.11.4-bullseye as base-builder
+FROM python:3.11.14-trixie AS base-builder
RUN apt-get update -y \
&& rm -rf /var/lib/apt/lists/* \
&& apt-get clean \
&& pip install -U pip wheel setuptools

-FROM base-builder as builder
+
+FROM base-builder AS builder
COPY ./stormpiper/requirements.txt /requirements.txt
RUN mkdir /core \
&& pip wheel \
@@ -47,10 +48,10 @@ RUN mkdir /core \
RUN mkdir /gunicorn \
&& pip wheel \
--wheel-dir=/gunicorn \
-gunicorn==20.1.0
+gunicorn==23.0.0


-FROM python:3.11.4-slim-bullseye as core-env
+FROM python:3.11.14-slim-trixie AS core-env
RUN pip install -U pip wheel setuptools
COPY --from=builder /core /core
COPY ./stormpiper/requirements.txt /requirements.txt
@@ -65,13 +66,13 @@ RUN pip install \
&& rm -rf /core/*


-FROM base-app as stormpiper-pod
+FROM base-app AS stormpiper-pod
COPY --from=core-env /opt/venv /opt/venv
EXPOSE 80
CMD /start-pod.sh


-FROM stormpiper-pod as stormpiper-test
+FROM stormpiper-pod AS stormpiper-test
COPY ./stormpiper/requirements_test.txt /requirements_test.txt
COPY ./stormpiper/pytest.ini /stormpiper/pytest.ini
COPY ./stormpiper/prestart-tests.sh /stormpiper/prestart-tests.sh
@@ -81,7 +82,7 @@ COPY .coveragerc /stormpiper/.coveragerc
CMD ["bash", "-c", "while true; do sleep 1; done"]


-FROM core-runtime as bg_worker
+FROM core-runtime AS bg_worker
# Add a user with an explicit UID/GID and create necessary directories
ENV IMG_USER=bg_worker
RUN addgroup --gid 1000 ${IMG_USER} \
@@ -97,7 +98,7 @@ RUN chmod gu+x /run-worker.sh /run-beat.sh /stormpiper/prestart-worker.sh
CMD ["/run-worker.sh"]


-FROM core-env as server-env
+FROM core-env AS server-env
COPY --from=builder /gunicorn /gunicorn
RUN pip install \
--no-index \
@@ -107,13 +108,13 @@ RUN pip install \
&& rm -rf /gunicorn/*


-FROM base-app as stormpiper
+FROM base-app AS stormpiper
COPY --from=server-env /opt/venv /opt/venv
COPY ./stormpiper/gunicorn_conf.py /gunicorn_conf.py
EXPOSE 80


-FROM base-builder as stormpiper-unpinned
+FROM base-builder AS stormpiper-unpinned
COPY ./stormpiper/requirements_unpinned.txt /requirements_unpinned.txt
RUN pip install -r /requirements_unpinned.txt

1 change: 0 additions & 1 deletion docker-compose.dev-environment-ci.yml
@@ -1,4 +1,3 @@
-version: '3.7'
services:
stormpiper-test:
environment:
4 changes: 1 addition & 3 deletions docker-compose.dev-postgis.yml
@@ -1,4 +1,3 @@
-version: '3.7'
services:
stormpiper-test:
environment:
@@ -35,9 +34,8 @@ services:
max-size: 10m
max-file: "3"
ports:
-  - '5454:5432'
+  - "5454:5432"
volumes:
- postgres-data:/var/lib/postgresql/data
volumes:
postgres-data:

90 changes: 56 additions & 34 deletions ipynb/EE_zonal_stats.ipynb
@@ -32,8 +32,10 @@
"import json\n",
"import ee\n",
"\n",
"service_account = '[email protected]'\n",
"credentials = ee.ServiceAccountCredentials(service_account, '/Users/christiannilsen/Documents/secrets/tncKey.json')## replace with local file or env \n",
"service_account = \"[email protected]\"\n",
"credentials = ee.ServiceAccountCredentials(\n",
" service_account, \"/Users/christiannilsen/Documents/secrets/tncKey.json\"\n",
") ## replace with local file or env\n",
"ee.Initialize(credentials)"
]
},
@@ -52,8 +54,12 @@
"metadata": {},
"outputs": [],
"source": [
"runoff = ee.Image('projects/ee-stormwaterheatmap/assets/production/Mean_Annual_Q_4_epochs')\n",
"concentrations = ee.Image('projects/ee-stormwaterheatmap/assets/production/coc_concentrations')"
"runoff = ee.Image(\n",
" \"projects/ee-stormwaterheatmap/assets/production/Mean_Annual_Q_4_epochs\"\n",
")\n",
"concentrations = ee.Image(\n",
" \"projects/ee-stormwaterheatmap/assets/production/coc_concentrations\"\n",
")"
]
},
{
@@ -94,7 +100,7 @@
"metadata": {},
"outputs": [],
"source": [
"print(json.dumps(q_dict.getInfo(),indent=4))"
"print(json.dumps(q_dict.getInfo(), indent=4))"
]
},
{
@@ -112,7 +118,7 @@
"metadata": {},
"outputs": [],
"source": [
"print(json.dumps(c_dict.getInfo(),indent=4))"
"print(json.dumps(c_dict.getInfo(), indent=4))"
]
},
{
@@ -130,7 +136,7 @@
"metadata": {},
"outputs": [],
"source": [
"#What bands are available from the runoff image?\n",
"# What bands are available from the runoff image?\n",
"(runoff.bandNames().getInfo())"
]
},
@@ -141,18 +147,20 @@
"metadata": {},
"outputs": [],
"source": [
"#Select the first epoch \n",
"# Select the first epoch\n",
"Q = runoff.select(0)\n",
"\n",
"#make a multiband image with unit pollutant loading\n",
"unit_loads = Q.multiply(concentrations).multiply(ee.Image(1e-9)) #units for this image are kg/m2/year\n",
"# make a multiband image with unit pollutant loading\n",
"unit_loads = Q.multiply(concentrations).multiply(\n",
" ee.Image(1e-9)\n",
") # units for this image are kg/m2/year\n",
"\n",
"#Use ee.PixelArea() to get the area for each pixel. This image will be in m2. Multiply it by the unit load image\n",
"# Use ee.PixelArea() to get the area for each pixel. This image will be in m2. Multiply it by the unit load image\n",
"loads = unit_loads.multiply(ee.Image.pixelArea())\n",
"\n",
"#Use the layer names to rename the bands to make it pretty\n",
"layer_names = c_dict.get('layer_names').getInfo().split(', ')\n",
"loads = loads.rename(layer_names) "
"# Use the layer names to rename the bands to make it pretty\n",
"layer_names = c_dict.get(\"layer_names\").getInfo().split(\", \")\n",
"loads = loads.rename(layer_names)"
]
},
{
@@ -162,7 +170,7 @@
"metadata": {},
"outputs": [],
"source": [
"#View layer names\n",
"# View layer names\n",
"layer_names"
]
},
@@ -181,8 +189,12 @@
"metadata": {},
"outputs": [],
"source": [
"#study area bounds \n",
"Tacoma = ee.FeatureCollection(\"projects/ee-tacoma-watershed/assets/vector/study_area\").union().geometry()"
"# study area bounds\n",
"Tacoma = (\n",
" ee.FeatureCollection(\"projects/ee-tacoma-watershed/assets/vector/study_area\")\n",
" .union()\n",
" .geometry()\n",
")"
]
},
{
@@ -195,8 +207,13 @@
"from IPython.display import Image\n",
"\n",
"# Display a thumbnail of unit loads just to see.\n",
"Image(url = loads.select('Total Suspended Solids').clip(Tacoma).getThumbUrl({'min': 0, 'max': 100, 'dimensions': 512,\n",
" 'palette': ['#e4f1e1','#7c1d6f']}))"
"Image(\n",
" url=loads.select(\"Total Suspended Solids\")\n",
" .clip(Tacoma)\n",
" .getThumbUrl(\n",
" {\"min\": 0, \"max\": 100, \"dimensions\": 512, \"palette\": [\"#e4f1e1\", \"#7c1d6f\"]}\n",
" )\n",
")"
]
},
{
@@ -214,11 +231,15 @@
"metadata": {},
"outputs": [],
"source": [
"#pull in some vectors \n",
"subwatersheds = ee.FeatureCollection(\"projects/ee-tacoma-watershed/assets/vector/Tacoma_SWPolys_swSubbasin\")\n",
"# pull in some vectors\n",
"subwatersheds = ee.FeatureCollection(\n",
" \"projects/ee-tacoma-watershed/assets/vector/Tacoma_SWPolys_swSubbasin\"\n",
")\n",
"\n",
"#calculate load per subwatershed \n",
"load_stats = loads.reduceRegions(collection=subwatersheds, reducer=ee.Reducer.sum(), scale=5) #sum of values for each feature. For mean values, use ee.Reducer.mean()"
"# calculate load per subwatershed\n",
"load_stats = loads.reduceRegions(\n",
" collection=subwatersheds, reducer=ee.Reducer.sum(), scale=5\n",
") # sum of values for each feature. For mean values, use ee.Reducer.mean()"
]
},
{
@@ -238,15 +259,15 @@
"source": [
"import pandas as pd\n",
"\n",
"#pull from server \n",
"load_stats_dict = load_stats.getInfo() \n",
"# pull from server\n",
"load_stats_dict = load_stats.getInfo()\n",
"\n",
"#get data as a pandas dataframe \n",
"# get data as a pandas dataframe\n",
"load_stats_df = pd.DataFrame()\n",
"for feat in load_stats_dict['features']:\n",
" df = pd.DataFrame([feat['properties']],columns=feat['properties'].keys())\n",
" load_stats_df = pd.concat([load_stats_df, df], sort=True, axis=0)\n",
" \n",
"for feat in load_stats_dict[\"features\"]:\n",
" df = pd.DataFrame([feat[\"properties\"]], columns=feat[\"properties\"].keys())\n",
" load_stats_df = pd.concat([load_stats_df, df], sort=True, axis=0)\n",
"\n",
"load_stats_df"
]
},
@@ -265,9 +286,10 @@
"metadata": {},
"outputs": [],
"source": [
"#as a geopandas df \n",
"# as a geopandas df\n",
"import geopandas as gpd\n",
"gdf = gpd.GeoDataFrame.from_features(load_stats_dict.get('features'))\n",
"\n",
"gdf = gpd.GeoDataFrame.from_features(load_stats_dict.get(\"features\"))\n",
"gdf"
]
},
@@ -278,8 +300,8 @@
"metadata": {},
"outputs": [],
"source": [
"#Plot geodataframe\n",
"gdf.plot(column='Total Suspended Solids')"
"# Plot geodataframe\n",
"gdf.plot(column=\"Total Suspended Solids\")"
]
},
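
A quick sanity check on the 1e-9 factor in the reformatted unit_loads cell above (a sketch, not part of this PR). The notebook states only the output units, kg/m2/year, so the input units assumed below, runoff in mm/yr and concentrations in ug/L, are assumptions chosen to make the factor work out:

# Sketch: confirm that (mm/yr) * (ug/L) * 1e-9 lands in kg/m2/yr.
# Assumed input units (not stated in the diff): Q in mm/yr, C in ug/L.
# 1 mm/yr = 1e-3 m/yr and 1 ug/L = 1e-9 kg / 1e-3 m3 = 1e-6 kg/m3,
# so the combined conversion is 1e-3 * 1e-6 = 1e-9, matching ee.Image(1e-9).
q_mm_per_yr = 850.0  # illustrative runoff depth
c_ug_per_l = 60.0  # illustrative concentration
unit_load = q_mm_per_yr * c_ug_per_l * 1e-9  # kg/m2/yr

# The notebook then multiplies by ee.Image.pixelArea() (m2) for kg/yr per pixel.
pixel_area_m2 = 100.0  # e.g. a 10 m x 10 m pixel
print(f"{unit_load:.2e} kg/m2/yr -> {unit_load * pixel_area_m2:.2e} kg/yr per pixel")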
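
Two client-side footnotes on the pandas and geopandas cells, again as a standalone sketch rather than part of the diff. The concat-in-a-loop cell can be collapsed into a single DataFrame constructor, and GeoDataFrame.from_features leaves the CRS unset, so it helps to tag one explicitly; EPSG:4326 is an assumption here, since geometries returned by getInfo are typically WGS84 lon/lat. The synthetic load_stats_dict below stands in for the real load_stats.getInfo() payload so the sketch runs on its own:

import geopandas as gpd
import pandas as pd

# Minimal stand-in for load_stats.getInfo() (illustrative values only).
load_stats_dict = {
    "features": [
        {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [-122.44, 47.25]},
            "properties": {"Total Suspended Solids": 5.1e-03},
        }
    ]
}

# One-shot equivalent of the concat-in-a-loop cell: build the frame
# directly from the list of per-feature property dicts.
load_stats_df = pd.DataFrame([f["properties"] for f in load_stats_dict["features"]])

# from_features does not set a CRS on its own; EPSG:4326 is an assumption.
gdf = gpd.GeoDataFrame.from_features(load_stats_dict["features"], crs="EPSG:4326")
print(load_stats_df.shape, gdf.crs)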