diff --git a/api/Dockerfile.openapi b/api/Dockerfile.openapi index a53a03a1..77d5a9d0 100644 --- a/api/Dockerfile.openapi +++ b/api/Dockerfile.openapi @@ -9,7 +9,7 @@ RUN go install github.com/swaggo/swag/cmd/swag@latest COPY . . RUN swag init --ot json --pd -g cmd/midas-api/main.go --parseInternal true --dir internal -RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_model.//g' {} \; +RUN find ./docs -type f -exec sed -i '' -e 's/github_com_USACE_instrumentation-api_api_internal_//g' {} \; FROM openapitools/openapi-generator-cli:latest-release AS docgen WORKDIR /work diff --git a/api/go.mod b/api/go.mod index 186ebf86..917c7209 100644 --- a/api/go.mod +++ b/api/go.mod @@ -17,15 +17,12 @@ require ( github.com/golang-jwt/jwt/v5 v5.2.1 github.com/google/uuid v1.6.0 github.com/hashicorp/go-version v1.7.0 - github.com/jackc/pgconn v1.14.3 - github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 github.com/jackc/pgtype v1.14.3 - github.com/jackc/pgx/v5 v5.7.0 + github.com/jackc/pgx/v5 v5.7.1 github.com/jmoiron/sqlx v1.4.0 github.com/labstack/echo-jwt/v4 v4.2.0 github.com/labstack/echo/v4 v4.12.0 github.com/lib/pq v1.10.9 - github.com/paulmach/orb v0.11.1 github.com/stretchr/testify v1.9.0 github.com/tidwall/btree v1.7.0 github.com/xeipuuv/gojsonschema v1.2.0 @@ -51,13 +48,11 @@ require ( github.com/aws/smithy-go v1.20.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect - github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/pgio v1.0.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect - github.com/jackc/pgproto3/v2 v2.3.3 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/pgx/v4 v4.18.3 // indirect - github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/labstack/gommon v0.4.2 // 
indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -68,7 +63,6 @@ require ( github.com/valyala/fasttemplate v1.2.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect - go.mongodb.org/mongo-driver v1.16.1 // indirect golang.org/x/net v0.29.0 // indirect golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.25.0 // indirect diff --git a/api/go.sum b/api/go.sum index f7ebf596..77f8e251 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,4 +1,3 @@ -filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1:1G1pk05UrOh0NlF1oeaaix1x8XzrfjIDK47TY0Zehcw= @@ -71,28 +70,21 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= 
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= @@ -105,16 +97,14 @@ github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8 github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= github.com/jackc/pgconn v1.14.3/go.mod 
h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= -github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= -github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= @@ -142,22 +132,20 @@ github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgS github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= -github.com/jackc/pgx/v5 v5.7.0 h1:FG6VLIdzvAPhnYqP14sQ2xhFLkiUQHCs6ySqO91kF4g= -github.com/jackc/pgx/v5 v5.7.0/go.mod 
h1:awP1KNnjylvpxHuHP63gzjhnGkI1iw+PMoIwvoleN/8= +github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= +github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= -github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -190,18 +178,12 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU= -github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU= -github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -225,7 +207,6 @@ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify 
v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= @@ -234,28 +215,18 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= -github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference 
v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= -go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g= -go.mongodb.org/mongo-driver v1.16.1 h1:rIVLL3q0IHM39dvE+z2ulZLp9ENZKThVfuvN/IiN4l8= -go.mongodb.org/mongo-driver v1.16.1/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -279,7 +250,6 @@ golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= golang.org/x/crypto v0.27.0 
h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= @@ -289,8 +259,6 @@ golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -298,10 +266,7 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net 
v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= @@ -310,9 +275,6 @@ golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= @@ -328,9 +290,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -369,18 +329,13 @@ golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= diff --git a/api/internal/db/alert.sql_gen.go b/api/internal/db/alert.sql_gen.go new file mode 100644 index 00000000..fa2a7875 --- /dev/null +++ b/api/internal/db/alert.sql_gen.go @@ -0,0 +1,218 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const alertCreate = `-- name: AlertCreate :exec +insert into alert (alert_config_id) values ($1) +` + +func (q *Queries) AlertCreate(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertCreate, alertConfigID) + return err +} + +const alertGet = `-- name: AlertGet :one +select a.id, a.alert_config_id, a.created_at, a.project_id, a.project_name, a.name, a.body, a.instruments, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +and a.id = $2 +` + +type AlertGetParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ID uuid.UUID `json:"id"` +} + +type AlertGetRow struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreatedAt time.Time `json:"created_at"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments []InstrumentIDName `json:"instruments"` + Read bool `json:"read"` +} + +func (q *Queries) AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow, error) { + row := q.db.QueryRow(ctx, alertGet, arg.ProfileID, arg.ID) + var i AlertGetRow + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreatedAt, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + &i.Read, + ) + return i, err +} + +const 
alertListForInstrument = `-- name: AlertListForInstrument :many +select id, alert_config_id, created_at, project_id, project_name, name, body, instruments from v_alert +where alert_config_id = any( + select id from alert_config_instrument + where instrument_id = $1 +) +` + +func (q *Queries) AlertListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) { + rows, err := q.db.Query(ctx, alertListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlert{} + for rows.Next() { + var i VAlert + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreatedAt, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertListForProfile = `-- name: AlertListForProfile :many +select a.id, a.alert_config_id, a.created_at, a.project_id, a.project_name, a.name, a.body, a.instruments, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +` + +type AlertListForProfileRow struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + CreatedAt time.Time `json:"created_at"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + Instruments []InstrumentIDName `json:"instruments"` + Read bool `json:"read"` +} + +func (q *Queries) AlertListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertListForProfileRow, error) { + rows, err := q.db.Query(ctx, alertListForProfile, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []AlertListForProfileRow{} + for rows.Next() { + var i 
AlertListForProfileRow + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreatedAt, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + &i.Read, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertListForProject = `-- name: AlertListForProject :many +select id, alert_config_id, created_at, project_id, project_name, name, body, instruments from v_alert where project_id = $1 +` + +func (q *Queries) AlertListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) { + rows, err := q.db.Query(ctx, alertListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlert{} + for rows.Next() { + var i VAlert + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.CreatedAt, + &i.ProjectID, + &i.ProjectName, + &i.Name, + &i.Body, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertReadCreate = `-- name: AlertReadCreate :exec +insert into alert_read (profile_id, alert_id) values ($1, $2) +on conflict do nothing +` + +type AlertReadCreateParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) AlertReadCreate(ctx context.Context, arg AlertReadCreateParams) error { + _, err := q.db.Exec(ctx, alertReadCreate, arg.ProfileID, arg.AlertID) + return err +} + +const alertReadDelete = `-- name: AlertReadDelete :exec +delete from alert_read where profile_id = $1 and alert_id = $2 +` + +type AlertReadDeleteParams struct { + ProfileID uuid.UUID `json:"profile_id"` + AlertID uuid.UUID `json:"alert_id"` +} + +func (q *Queries) AlertReadDelete(ctx context.Context, arg AlertReadDeleteParams) error { + _, err := q.db.Exec(ctx, alertReadDelete, arg.ProfileID, arg.AlertID) + return err +} diff --git 
a/api/internal/db/alert_check.sql_gen.go b/api/internal/db/alert_check.sql_gen.go new file mode 100644 index 00000000..293f9b73 --- /dev/null +++ b/api/internal/db/alert_check.sql_gen.go @@ -0,0 +1,127 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert_check.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const alertConfigListUpdateLastCheckedAt = `-- name: AlertConfigListUpdateLastCheckedAt :many +update alert_config ac1 +set last_checked_at = now() +from ( + select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions + from v_alert_config +) ac2 +where ac1.id = ac2.id +returning ac2.id, ac2.name, ac2.body, ac2.created_by, ac2.created_by_username, ac2.created_at, ac2.updated_by, ac2.updated_by_username, ac2.updated_at, ac2.project_id, ac2.project_name, ac2.alert_type_id, ac2.alert_type, ac2.started_at, ac2.schedule_interval, ac2.mute_consecutive_alerts, ac2.remind_interval, ac2.warning_interval, ac2.last_checked_at, ac2.last_reminded_at, ac2.create_next_submittal_from, ac2.instruments, ac2.alert_email_subscriptions +` + +func (q *Queries) AlertConfigListUpdateLastCheckedAt(ctx context.Context) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListUpdateLastCheckedAt) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartedAt, + &i.ScheduleInterval, + 
&i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastCheckedAt, + &i.LastRemindedAt, + &i.CreateNextSubmittalFrom, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertConfigUpdateLastRemindedAt = `-- name: AlertConfigUpdateLastRemindedAt :exec +update alert_config set last_reminded_at = $2 where id = $1 +` + +type AlertConfigUpdateLastRemindedAtParams struct { + ID uuid.UUID `json:"id"` + LastRemindedAt *time.Time `json:"last_reminded_at"` +} + +func (q *Queries) AlertConfigUpdateLastRemindedAt(ctx context.Context, arg AlertConfigUpdateLastRemindedAtParams) error { + _, err := q.db.Exec(ctx, alertConfigUpdateLastRemindedAt, arg.ID, arg.LastRemindedAt) + return err +} + +const submittalCreateNextFromNewAlertConfigDate = `-- name: SubmittalCreateNextFromNewAlertConfigDate :exec +insert into submittal (alert_config_id, created_at, due_at) +select + ac.id, + $2::timestamptz, + $2::timestamptz + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +type SubmittalCreateNextFromNewAlertConfigDateParams struct { + ID uuid.UUID `json:"id"` + Date time.Time `json:"date"` +} + +func (q *Queries) SubmittalCreateNextFromNewAlertConfigDate(ctx context.Context, arg SubmittalCreateNextFromNewAlertConfigDateParams) error { + _, err := q.db.Exec(ctx, submittalCreateNextFromNewAlertConfigDate, arg.ID, arg.Date) + return err +} + +const submittalUpdateCompletionDateOrWarningSent = `-- name: SubmittalUpdateCompletionDateOrWarningSent :exec +update submittal set + submittal_status_id = $2, + completed_at = $3, + warning_sent = $4 +where id = $1 +` + +type SubmittalUpdateCompletionDateOrWarningSentParams struct { + ID uuid.UUID `json:"id"` + SubmittalStatusID *uuid.UUID `json:"submittal_status_id"` + CompletedAt *time.Time `json:"completed_at"` + WarningSent bool `json:"warning_sent"` +} + 
+func (q *Queries) SubmittalUpdateCompletionDateOrWarningSent(ctx context.Context, arg SubmittalUpdateCompletionDateOrWarningSentParams) error { + _, err := q.db.Exec(ctx, submittalUpdateCompletionDateOrWarningSent, + arg.ID, + arg.SubmittalStatusID, + arg.CompletedAt, + arg.WarningSent, + ) + return err +} diff --git a/api/internal/db/alert_config.sql_gen.go b/api/internal/db/alert_config.sql_gen.go new file mode 100644 index 00000000..b31c65da --- /dev/null +++ b/api/internal/db/alert_config.sql_gen.go @@ -0,0 +1,373 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const alertConfigCreate = `-- name: AlertConfigCreate :one +insert into alert_config ( + project_id, + name, + body, + alert_type_id, + started_at, + schedule_interval, + mute_consecutive_alerts, + remind_interval, + warning_interval, + created_by, + created_at +) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) +returning id +` + +type AlertConfigCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + StartedAt time.Time `json:"started_at"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +func (q *Queries) AlertConfigCreate(ctx context.Context, arg AlertConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, alertConfigCreate, + arg.ProjectID, + arg.Name, + arg.Body, + arg.AlertTypeID, + arg.StartedAt, + arg.ScheduleInterval, + arg.MuteConsecutiveAlerts, + arg.RemindInterval, + arg.WarningInterval, + arg.CreatedBy, + arg.CreatedAt, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} 
+ +const alertConfigDelete = `-- name: AlertConfigDelete :exec +update alert_config set deleted=true where id = $1 +` + +func (q *Queries) AlertConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, alertConfigDelete, id) + return err +} + +const alertConfigGet = `-- name: AlertConfigGet :one +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions from v_alert_config where id = $1 +` + +func (q *Queries) AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfig, error) { + row := q.db.QueryRow(ctx, alertConfigGet, id) + var i VAlertConfig + err := row.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartedAt, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastCheckedAt, + &i.LastRemindedAt, + &i.CreateNextSubmittalFrom, + &i.Instruments, + &i.AlertEmailSubscriptions, + ) + return i, err +} + +const alertConfigInstrumentCreateAssignment = `-- name: AlertConfigInstrumentCreateAssignment :exec +insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2) +` + +type AlertConfigInstrumentCreateAssignmentParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) AlertConfigInstrumentCreateAssignment(ctx context.Context, arg AlertConfigInstrumentCreateAssignmentParams) error { + _, err := q.db.Exec(ctx, alertConfigInstrumentCreateAssignment, arg.AlertConfigID, arg.InstrumentID) + return err +} + +const 
alertConfigInstrumentDeleteAssignmentsForAlertConfig = `-- name: AlertConfigInstrumentDeleteAssignmentsForAlertConfig :exec +delete from alert_config_instrument where alert_config_id = $1 +` + +func (q *Queries) AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertConfigInstrumentDeleteAssignmentsForAlertConfig, alertConfigID) + return err +} + +const alertConfigListForInstrument = `-- name: AlertConfigListForInstrument :many +select t.id, t.name, t.body, t.created_by, t.created_by_username, t.created_at, t.updated_by, t.updated_by_username, t.updated_at, t.project_id, t.project_name, t.alert_type_id, t.alert_type, t.started_at, t.schedule_interval, t.mute_consecutive_alerts, t.remind_interval, t.warning_interval, t.last_checked_at, t.last_reminded_at, t.create_next_submittal_from, t.instruments, t.alert_email_subscriptions +from v_alert_config t +inner join alert_config_instrument aci on t.id = aci.alert_config_id +where aci.instrument_id = $1 +order by t.name +` + +func (q *Queries) AlertConfigListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartedAt, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastCheckedAt, + &i.LastRemindedAt, + &i.CreateNextSubmittalFrom, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, 
nil +} + +const alertConfigListForProject = `-- name: AlertConfigListForProject :many +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions +from v_alert_config +where project_id = $1 +order by name +` + +func (q *Queries) AlertConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartedAt, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastCheckedAt, + &i.LastRemindedAt, + &i.CreateNextSubmittalFrom, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertConfigListForProjectAlertType = `-- name: AlertConfigListForProjectAlertType :many +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_type_id, alert_type, started_at, schedule_interval, mute_consecutive_alerts, remind_interval, warning_interval, last_checked_at, last_reminded_at, create_next_submittal_from, instruments, alert_email_subscriptions +from v_alert_config +where project_id = $1 +and alert_type_id = $2 +order by name +` + +type 
AlertConfigListForProjectAlertTypeParams struct { + ProjectID uuid.UUID `json:"project_id"` + AlertTypeID uuid.UUID `json:"alert_type_id"` +} + +func (q *Queries) AlertConfigListForProjectAlertType(ctx context.Context, arg AlertConfigListForProjectAlertTypeParams) ([]VAlertConfig, error) { + rows, err := q.db.Query(ctx, alertConfigListForProjectAlertType, arg.ProjectID, arg.AlertTypeID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertConfig{} + for rows.Next() { + var i VAlertConfig + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertTypeID, + &i.AlertType, + &i.StartedAt, + &i.ScheduleInterval, + &i.MuteConsecutiveAlerts, + &i.RemindInterval, + &i.WarningInterval, + &i.LastCheckedAt, + &i.LastRemindedAt, + &i.CreateNextSubmittalFrom, + &i.Instruments, + &i.AlertEmailSubscriptions, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertConfigUpdate = `-- name: AlertConfigUpdate :exec +update alert_config set + name = $3, + body = $4, + started_at = $5, + schedule_interval = $6, + mute_consecutive_alerts = $7, + remind_interval = $8, + warning_interval = $9, + updated_by = $10, + updated_at = $11 +where id = $1 and project_id = $2 +` + +type AlertConfigUpdateParams struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + ScheduleInterval string `json:"schedule_interval"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) 
AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error { + _, err := q.db.Exec(ctx, alertConfigUpdate, + arg.ID, + arg.ProjectID, + arg.Name, + arg.Body, + arg.StartedAt, + arg.ScheduleInterval, + arg.MuteConsecutiveAlerts, + arg.RemindInterval, + arg.WarningInterval, + arg.UpdatedBy, + arg.UpdatedAt, + ) + return err +} + +const submittalCreateNextFromExistingAlertConfigDate = `-- name: SubmittalCreateNextFromExistingAlertConfigDate :exec +insert into submittal (alert_config_id, due_at) +select ac.id, ac.created_at + ac.schedule_interval +from alert_config ac +where ac.id = $1 +` + +func (q *Queries) SubmittalCreateNextFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalCreateNextFromExistingAlertConfigDate, id) + return err +} + +const submittalUpdateNextForAlertConfig = `-- name: SubmittalUpdateNextForAlertConfig :one +update submittal +set due_at = sq.new_due_at +from ( + select + sub.id as submittal_id, + sub.created_at + ac.schedule_interval as new_due_at + from submittal sub + inner join alert_config ac on sub.alert_config_id = ac.id + where sub.alert_config_id = $1 + and sub.due_at > now() + and sub.completed_at is null + and not sub.marked_as_missing +) sq +where id = sq.submittal_id +and sq.new_due_at > now() +returning id +` + +func (q *Queries) SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, submittalUpdateNextForAlertConfig, alertConfigID) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} diff --git a/api/internal/db/alert_measurement_check.sql_gen.go b/api/internal/db/alert_measurement_check.sql_gen.go new file mode 100644 index 00000000..12b6f30d --- /dev/null +++ b/api/internal/db/alert_measurement_check.sql_gen.go @@ -0,0 +1,81 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: alert_measurement_check.sql + +package db + +import ( + "context" +) + +const submittalListIncompleteEvaluation = `-- name: SubmittalListIncompleteEvaluation :many +select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind from v_alert_check_evaluation_submittal +where submittal_id = any( + select id from submittal + where completed_at is null and not marked_as_missing +) +` + +func (q *Queries) SubmittalListIncompleteEvaluation(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListIncompleteEvaluation) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertCheckEvaluationSubmittal{} + for rows.Next() { + var i VAlertCheckEvaluationSubmittal + if err := rows.Scan( + &i.AlertConfigID, + &i.SubmittalID, + &i.Submittal, + &i.ShouldWarn, + &i.ShouldAlert, + &i.ShouldRemind, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const submittalListIncompleteMeasurement = `-- name: SubmittalListIncompleteMeasurement :many +select alert_config_id, submittal_id, submittal, should_warn, should_alert, should_remind, affected_timeseries from v_alert_check_measurement_submittal +where submittal_id = any( + select id from submittal + where completed_at is null and not marked_as_missing +) +` + +func (q *Queries) SubmittalListIncompleteMeasurement(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListIncompleteMeasurement) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAlertCheckMeasurementSubmittal{} + for rows.Next() { + var i VAlertCheckMeasurementSubmittal + if err := rows.Scan( + &i.AlertConfigID, + &i.SubmittalID, + &i.Submittal, + &i.ShouldWarn, + &i.ShouldAlert, + &i.ShouldRemind, + &i.AffectedTimeseries, + ); err != nil { + return nil, 
err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/alert_subscription.sql_gen.go b/api/internal/db/alert_subscription.sql_gen.go new file mode 100644 index 00000000..81499c0f --- /dev/null +++ b/api/internal/db/alert_subscription.sql_gen.go @@ -0,0 +1,222 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: alert_subscription.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const alertEmailSubscriptionCreate = `-- name: AlertEmailSubscriptionCreate :exec +insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) +on conflict on constraint email_unique_alert_config do nothing +` + +type AlertEmailSubscriptionCreateParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` +} + +func (q *Queries) AlertEmailSubscriptionCreate(ctx context.Context, arg AlertEmailSubscriptionCreateParams) error { + _, err := q.db.Exec(ctx, alertEmailSubscriptionCreate, arg.AlertConfigID, arg.EmailID) + return err +} + +const alertEmailSubscriptionDelete = `-- name: AlertEmailSubscriptionDelete :exec +delete from alert_email_subscription where alert_config_id = $1 and email_id = $2 +` + +type AlertEmailSubscriptionDeleteParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` +} + +func (q *Queries) AlertEmailSubscriptionDelete(ctx context.Context, arg AlertEmailSubscriptionDeleteParams) error { + _, err := q.db.Exec(ctx, alertEmailSubscriptionDelete, arg.AlertConfigID, arg.EmailID) + return err +} + +const alertEmailSubscritpionDeleteForAlertConfig = `-- name: AlertEmailSubscritpionDeleteForAlertConfig :exec +delete from alert_email_subscription where alert_config_id = $1 +` + +func (q *Queries) AlertEmailSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := 
q.db.Exec(ctx, alertEmailSubscritpionDeleteForAlertConfig, alertConfigID) + return err +} + +const alertProfileSubscriptionCreate = `-- name: AlertProfileSubscriptionCreate :exec +insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) +on conflict on constraint profile_unique_alert_config do nothing +` + +type AlertProfileSubscriptionCreateParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertProfileSubscriptionCreate(ctx context.Context, arg AlertProfileSubscriptionCreateParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionCreate, arg.AlertConfigID, arg.ProfileID) + return err +} + +const alertProfileSubscriptionCreateOnAnyConflictDoNothing = `-- name: AlertProfileSubscriptionCreateOnAnyConflictDoNothing :exec +insert into alert_profile_subscription (alert_config_id, profile_id) +values ($1, $2) +on conflict do nothing +` + +type AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx context.Context, arg AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionCreateOnAnyConflictDoNothing, arg.AlertConfigID, arg.ProfileID) + return err +} + +const alertProfileSubscriptionDelete = `-- name: AlertProfileSubscriptionDelete :exec +delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 +` + +type AlertProfileSubscriptionDeleteParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertProfileSubscriptionDelete(ctx context.Context, arg AlertProfileSubscriptionDeleteParams) error { + _, err := q.db.Exec(ctx, alertProfileSubscriptionDelete, arg.AlertConfigID, arg.ProfileID) + return err +} + 
+const alertProfileSubscritpionDeleteForAlertConfig = `-- name: AlertProfileSubscritpionDeleteForAlertConfig :exec +delete from alert_profile_subscription where alert_config_id = $1 +` + +func (q *Queries) AlertProfileSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, alertProfileSubscritpionDeleteForAlertConfig, alertConfigID) + return err +} + +const alertSubscriptionGet = `-- name: AlertSubscriptionGet :one +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where id = $1 +` + +func (q *Queries) AlertSubscriptionGet(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, alertSubscriptionGet, id) + var i AlertProfileSubscription + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ) + return i, err +} + +const alertSubscriptionGetForAlertConfigProfile = `-- name: AlertSubscriptionGetForAlertConfigProfile :one +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where alert_config_id = $1 and profile_id = $2 +` + +type AlertSubscriptionGetForAlertConfigProfileParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertSubscriptionGetForAlertConfigProfile(ctx context.Context, arg AlertSubscriptionGetForAlertConfigProfileParams) (AlertProfileSubscription, error) { + row := q.db.QueryRow(ctx, alertSubscriptionGetForAlertConfigProfile, arg.AlertConfigID, arg.ProfileID) + var i AlertProfileSubscription + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ) + return i, err +} + +const alertSubscriptionListForProfile = `-- name: AlertSubscriptionListForProfile :many +select id, alert_config_id, profile_id, mute_ui, mute_notify from alert_profile_subscription where profile_id = $1 +` + +func (q *Queries) 
AlertSubscriptionListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) { + rows, err := q.db.Query(ctx, alertSubscriptionListForProfile, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []AlertProfileSubscription{} + for rows.Next() { + var i AlertProfileSubscription + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.ProfileID, + &i.MuteUi, + &i.MuteNotify, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const alertSubscriptionUpdateForProfile = `-- name: AlertSubscriptionUpdateForProfile :exec +update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4 +` + +type AlertSubscriptionUpdateForProfileParams struct { + MuteUi bool `json:"mute_ui"` + MuteNotify bool `json:"mute_notify"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) AlertSubscriptionUpdateForProfile(ctx context.Context, arg AlertSubscriptionUpdateForProfileParams) error { + _, err := q.db.Exec(ctx, alertSubscriptionUpdateForProfile, + arg.MuteUi, + arg.MuteNotify, + arg.AlertConfigID, + arg.ProfileID, + ) + return err +} + +const emailDelete = `-- name: EmailDelete :exec +delete from email where id = $1 +` + +func (q *Queries) EmailDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, emailDelete, id) + return err +} + +const emailGetOrCreate = `-- name: EmailGetOrCreate :one +with e as ( + insert into email (email) values ($1) + on conflict on constraint unique_email do nothing + returning id +) +select id from e +union +select id from email where email = $1 +limit 1 +` + +func (q *Queries) EmailGetOrCreate(ctx context.Context, email string) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, emailGetOrCreate, email) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} diff 
--git a/api/internal/db/autocomplete.sql_gen.go b/api/internal/db/autocomplete.sql_gen.go new file mode 100644 index 00000000..82d99c90 --- /dev/null +++ b/api/internal/db/autocomplete.sql_gen.go @@ -0,0 +1,56 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: autocomplete.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const emailAutocompleteList = `-- name: EmailAutocompleteList :many +select id, user_type, username, email +from v_email_autocomplete +where username_email ilike '%'||$1||'%' +limit $2 +` + +type EmailAutocompleteListParams struct { + SearchKeyword *string `json:"search_keyword"` + ResultLimit int32 `json:"result_limit"` +} + +type EmailAutocompleteListRow struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type"` + Username interface{} `json:"username"` + Email string `json:"email"` +} + +func (q *Queries) EmailAutocompleteList(ctx context.Context, arg EmailAutocompleteListParams) ([]EmailAutocompleteListRow, error) { + rows, err := q.db.Query(ctx, emailAutocompleteList, arg.SearchKeyword, arg.ResultLimit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []EmailAutocompleteListRow{} + for rows.Next() { + var i EmailAutocompleteListRow + if err := rows.Scan( + &i.ID, + &i.UserType, + &i.Username, + &i.Email, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/aware.sql_gen.go b/api/internal/db/aware.sql_gen.go new file mode 100644 index 00000000..9b468c0a --- /dev/null +++ b/api/internal/db/aware.sql_gen.go @@ -0,0 +1,93 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: aware.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const awareParameterList = `-- name: AwareParameterList :many +select id, key, parameter_id, unit_id from aware_parameter +` + +type AwareParameterListRow struct { + ID uuid.UUID `json:"id"` + Key string `json:"key"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` +} + +func (q *Queries) AwareParameterList(ctx context.Context) ([]AwareParameterListRow, error) { + rows, err := q.db.Query(ctx, awareParameterList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []AwareParameterListRow{} + for rows.Next() { + var i AwareParameterListRow + if err := rows.Scan( + &i.ID, + &i.Key, + &i.ParameterID, + &i.UnitID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const awarePlatformCreate = `-- name: AwarePlatformCreate :exec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) +` + +type AwarePlatformCreateParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` +} + +func (q *Queries) AwarePlatformCreate(ctx context.Context, arg AwarePlatformCreateParams) error { + _, err := q.db.Exec(ctx, awarePlatformCreate, arg.InstrumentID, arg.AwareID) + return err +} + +const awarePlatformParameterListEnabled = `-- name: AwarePlatformParameterListEnabled :many +select instrument_id, aware_id, aware_parameter_key, timeseries_id +from v_aware_platform_parameter_enabled +order by aware_id, aware_parameter_key +` + +func (q *Queries) AwarePlatformParameterListEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) { + rows, err := q.db.Query(ctx, awarePlatformParameterListEnabled) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VAwarePlatformParameterEnabled{} + for rows.Next() { + var i 
VAwarePlatformParameterEnabled + if err := rows.Scan( + &i.InstrumentID, + &i.AwareID, + &i.AwareParameterKey, + &i.TimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go new file mode 100644 index 00000000..2cc43e7f --- /dev/null +++ b/api/internal/db/batch.go @@ -0,0 +1,2414 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: batch.go + +package db + +import ( + "context" + "errors" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" +) + +var ( + ErrBatchAlreadyClosed = errors.New("batch already closed") +) + +const alertCreateBatch = `-- name: AlertCreateBatch :batchexec +insert into alert (alert_config_id) values ($1) +` + +type AlertCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +func (q *Queries) AlertCreateBatch(ctx context.Context, alertConfigID []uuid.UUID) *AlertCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range alertConfigID { + vals := []interface{}{ + a, + } + batch.Queue(alertCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &AlertCreateBatchBatchResults{br, len(alertConfigID), false} +} + +func (b *AlertCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *AlertCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const awarePlatformCreateBatch = `-- name: AwarePlatformCreateBatch :batchexec +insert into aware_platform (instrument_id, aware_id) values ($1, $2) +` + +type AwarePlatformCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type AwarePlatformCreateBatchParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + AwareID uuid.UUID `json:"aware_id"` +} + +func (q *Queries) AwarePlatformCreateBatch(ctx context.Context, arg []AwarePlatformCreateBatchParams) *AwarePlatformCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.AwareID, + } + batch.Queue(awarePlatformCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &AwarePlatformCreateBatchBatchResults{br, len(arg), false} +} + +func (b *AwarePlatformCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *AwarePlatformCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const dataloggerErrorCreateBatch = `-- name: DataloggerErrorCreateBatch :batchexec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +) +` + +type DataloggerErrorCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type DataloggerErrorCreateBatchParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + ErrorMessage *string `json:"error_message"` +} + +func (q *Queries) DataloggerErrorCreateBatch(ctx context.Context, arg []DataloggerErrorCreateBatchParams) *DataloggerErrorCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.DataloggerID, + a.TableName, + a.ErrorMessage, + } + batch.Queue(dataloggerErrorCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &DataloggerErrorCreateBatchBatchResults{br, len(arg), false} +} + +func (b *DataloggerErrorCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *DataloggerErrorCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const evaluationInstrumentCreateBatch = `-- name: EvaluationInstrumentCreateBatch :batchexec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type EvaluationInstrumentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type EvaluationInstrumentCreateBatchParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) EvaluationInstrumentCreateBatch(ctx context.Context, arg []EvaluationInstrumentCreateBatchParams) *EvaluationInstrumentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.EvaluationID, + a.InstrumentID, + } + batch.Queue(evaluationInstrumentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &EvaluationInstrumentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *EvaluationInstrumentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const inclOptsCreateBatch = `-- name: InclOptsCreateBatch :batchexec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type InclOptsCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InclOptsCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) InclOptsCreateBatch(ctx context.Context, arg []InclOptsCreateBatchParams) *InclOptsCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(inclOptsCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InclOptsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InclOptsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InclOptsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const inclOptsUpdateBatch = `-- name: InclOptsUpdateBatch :batchexec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type InclOptsUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InclOptsUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) InclOptsUpdateBatch(ctx context.Context, arg []InclOptsUpdateBatchParams) *InclOptsUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(inclOptsUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InclOptsUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *InclOptsUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InclOptsUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const inclSegmentCreateBatch = `-- name: InclSegmentCreateBatch :batchexec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type InclSegmentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InclSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) InclSegmentCreateBatch(ctx context.Context, arg []InclSegmentCreateBatchParams) *InclSegmentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.DepthTimeseriesID, + a.A0TimeseriesID, + a.A180TimeseriesID, + a.B0TimeseriesID, + a.B180TimeseriesID, + } + batch.Queue(inclSegmentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InclSegmentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InclSegmentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InclSegmentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const inclSegmentUpdateBatch = `-- name: InclSegmentUpdateBatch :batchexec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2 +` + +type InclSegmentUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InclSegmentUpdateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) InclSegmentUpdateBatch(ctx context.Context, arg []InclSegmentUpdateBatchParams) *InclSegmentUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.DepthTimeseriesID, + a.A0TimeseriesID, + a.A180TimeseriesID, + a.B0TimeseriesID, + a.B180TimeseriesID, + } + batch.Queue(inclSegmentUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InclSegmentUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *InclSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InclSegmentUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentConstantCreateBatch = `-- name: InstrumentConstantCreateBatch :batchexec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) +` + +type InstrumentConstantCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InstrumentConstantCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.TimeseriesID, + } + batch.Queue(instrumentConstantCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InstrumentConstantCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentConstantCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InstrumentConstantCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentCreateBatch = `-- name: InstrumentCreateBatch :batchone +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify($1, 'instrument'), + $1, + $2, + ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + $4, + $5, + $6, + $7, + $8, + $9, + $10 +) +returning id, slug +` + +type InstrumentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InstrumentCreateBatchParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry []byte `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type InstrumentCreateBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) InstrumentCreateBatch(ctx context.Context, arg []InstrumentCreateBatchParams) *InstrumentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.Name, + a.TypeID, + a.Geometry, + a.Station, + a.StationOffset, + a.CreatedBy, + a.CreatedAt, + a.NidID, + a.UsgsID, + a.ShowCwmsTab, + } + batch.Queue(instrumentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InstrumentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentCreateBatchBatchResults) QueryRow(f func(int, InstrumentCreateBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i InstrumentCreateBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan(&i.ID, &i.Slug) + if f != nil { + f(t, i, err) + } + } +} + +func (b *InstrumentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentGroupCreateBatch = `-- name: InstrumentGroupCreateBatch :batchone +insert into instrument_group (slug, name, description, created_by, created_at, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id +` + +type InstrumentGroupCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InstrumentGroupCreateBatchParams struct { + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type InstrumentGroupCreateBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentGroupCreateBatch(ctx context.Context, arg []InstrumentGroupCreateBatchParams) *InstrumentGroupCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.Name, + a.Description, + a.CreatedBy, + a.CreatedAt, + a.ProjectID, + } + 
batch.Queue(instrumentGroupCreateBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &InstrumentGroupCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentGroupCreateBatchBatchResults) QueryRow(f func(int, InstrumentGroupCreateBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i InstrumentGroupCreateBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *InstrumentGroupCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentNoteCreateBatch = `-- name: InstrumentNoteCreateBatch :batchone +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at +` + +type InstrumentNoteCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InstrumentNoteCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +func (q *Queries) InstrumentNoteCreateBatch(ctx context.Context, arg []InstrumentNoteCreateBatchParams) *InstrumentNoteCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.Title, + a.Body, + a.Time, + a.CreatedBy, + a.CreatedAt, + } + batch.Queue(instrumentNoteCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InstrumentNoteCreateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentNoteCreateBatchBatchResults) QueryRow(f func(int, InstrumentNote, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i InstrumentNote + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *InstrumentNoteCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const instrumentStatusCreateOrUpdateBatch = `-- name: InstrumentStatusCreateOrUpdateBatch :batchexec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id +` + +type InstrumentStatusCreateOrUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type InstrumentStatusCreateOrUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) InstrumentStatusCreateOrUpdateBatch(ctx context.Context, arg []InstrumentStatusCreateOrUpdateBatchParams) *InstrumentStatusCreateOrUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.StatusID, + a.Time, + } + batch.Queue(instrumentStatusCreateOrUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &InstrumentStatusCreateOrUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *InstrumentStatusCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *InstrumentStatusCreateOrUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const ipiOptsCreateBatch = `-- name: IpiOptsCreateBatch :batchexec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type IpiOptsCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type IpiOptsCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) IpiOptsCreateBatch(ctx context.Context, arg []IpiOptsCreateBatchParams) *IpiOptsCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(ipiOptsCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &IpiOptsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *IpiOptsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *IpiOptsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const ipiOptsUpdateBatch = `-- name: IpiOptsUpdateBatch :batchexec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type IpiOptsUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type IpiOptsUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) IpiOptsUpdateBatch(ctx context.Context, arg []IpiOptsUpdateBatchParams) *IpiOptsUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(ipiOptsUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &IpiOptsUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *IpiOptsUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *IpiOptsUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const ipiSegmentCreateBatch = `-- name: IpiSegmentCreateBatch :batchexec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6) +` + +type IpiSegmentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type IpiSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) IpiSegmentCreateBatch(ctx context.Context, arg []IpiSegmentCreateBatchParams) *IpiSegmentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(ipiSegmentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &IpiSegmentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *IpiSegmentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *IpiSegmentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const ipiSegmentUpdateBatch = `-- name: IpiSegmentUpdateBatch :batchexec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2 +` + +type IpiSegmentUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type IpiSegmentUpdateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) IpiSegmentUpdateBatch(ctx context.Context, arg []IpiSegmentUpdateBatchParams) *IpiSegmentUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.TiltTimeseriesID, + a.IncDevTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(ipiSegmentUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &IpiSegmentUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *IpiSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *IpiSegmentUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const plotConfigCustomShapeCreateBatch = `-- name: PlotConfigCustomShapeCreateBatch :batchexec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) +` + +type PlotConfigCustomShapeCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type PlotConfigCustomShapeCreateBatchParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) PlotConfigCustomShapeCreateBatch(ctx context.Context, arg []PlotConfigCustomShapeCreateBatchParams) *PlotConfigCustomShapeCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotConfigurationID, + a.Enabled, + a.Name, + a.DataPoint, + a.Color, + } + batch.Queue(plotConfigCustomShapeCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &PlotConfigCustomShapeCreateBatchBatchResults{br, len(arg), false} +} + +func (b *PlotConfigCustomShapeCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *PlotConfigCustomShapeCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const plotConfigTimeseriesTracesCreateBatch = `-- name: PlotConfigTimeseriesTracesCreateBatch :batchexec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8) +` + +type PlotConfigTimeseriesTracesCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type PlotConfigTimeseriesTracesCreateBatchParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +func (q *Queries) PlotConfigTimeseriesTracesCreateBatch(ctx context.Context, arg []PlotConfigTimeseriesTracesCreateBatchParams) *PlotConfigTimeseriesTracesCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotConfigurationID, + a.TimeseriesID, + a.TraceOrder, + a.Color, + a.LineStyle, + a.Width, + a.ShowMarkers, + a.YAxis, + } + batch.Queue(plotConfigTimeseriesTracesCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &PlotConfigTimeseriesTracesCreateBatchBatchResults{br, len(arg), false} +} + +func (b *PlotConfigTimeseriesTracesCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *PlotConfigTimeseriesTracesCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const plotContourConfigTimeseriesCreateBatch = `-- name: PlotContourConfigTimeseriesCreateBatch :batchexec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing +` + +type PlotContourConfigTimeseriesCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type PlotContourConfigTimeseriesCreateBatchParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) PlotContourConfigTimeseriesCreateBatch(ctx context.Context, arg []PlotContourConfigTimeseriesCreateBatchParams) *PlotContourConfigTimeseriesCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlotContourConfigID, + a.TimeseriesID, + } + batch.Queue(plotContourConfigTimeseriesCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &PlotContourConfigTimeseriesCreateBatchBatchResults{br, len(arg), false} +} + +func (b *PlotContourConfigTimeseriesCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *PlotContourConfigTimeseriesCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const projectCreateBatch = `-- name: ProjectCreateBatch :batchone +insert into project (federal_id, slug, name, district_id, created_by, created_at) +values ($1, slugify($2, 'project'), $2, $3, $4, $5) +returning id, slug +` + +type ProjectCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ProjectCreateBatchParams struct { + FederalID *string `json:"federal_id"` + Name string `json:"name"` + DistrictID *uuid.UUID `json:"district_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +type ProjectCreateBatchRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) ProjectCreateBatch(ctx context.Context, arg []ProjectCreateBatchParams) *ProjectCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.FederalID, + a.Name, + a.DistrictID, + a.CreatedBy, + a.CreatedAt, + } + batch.Queue(projectCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ProjectCreateBatchBatchResults{br, len(arg), false} +} + +func (b *ProjectCreateBatchBatchResults) QueryRow(f func(int, ProjectCreateBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i ProjectCreateBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan(&i.ID, &i.Slug) + if f != nil { + f(t, i, err) + } + } +} + +func (b *ProjectCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const projectInstrumentCreateBatch = `-- name: ProjectInstrumentCreateBatch :batchexec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +` + +type ProjectInstrumentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ProjectInstrumentCreateBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentCreateBatch(ctx context.Context, arg []ProjectInstrumentCreateBatchParams) *ProjectInstrumentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(projectInstrumentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ProjectInstrumentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *ProjectInstrumentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ProjectInstrumentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const projectInstrumentDeleteBatch = `-- name: ProjectInstrumentDeleteBatch :batchexec +delete from project_instrument where project_id = $1 and instrument_id = $2 +` + +type ProjectInstrumentDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ProjectInstrumentDeleteBatchParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentDeleteBatch(ctx context.Context, arg []ProjectInstrumentDeleteBatchParams) *ProjectInstrumentDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ProjectID, + a.InstrumentID, + } + batch.Queue(projectInstrumentDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ProjectInstrumentDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *ProjectInstrumentDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ProjectInstrumentDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const reportConfigPlotConfigCreateBatch = `-- name: ReportConfigPlotConfigCreateBatch :batchexec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +` + +type ReportConfigPlotConfigCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ReportConfigPlotConfigCreateBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigCreateBatch(ctx context.Context, arg []ReportConfigPlotConfigCreateBatchParams) *ReportConfigPlotConfigCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(reportConfigPlotConfigCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ReportConfigPlotConfigCreateBatchBatchResults{br, len(arg), false} +} + +func (b *ReportConfigPlotConfigCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ReportConfigPlotConfigCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const reportConfigPlotConfigDeleteBatch = `-- name: ReportConfigPlotConfigDeleteBatch :batchexec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 +` + +type ReportConfigPlotConfigDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type ReportConfigPlotConfigDeleteBatchParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigDeleteBatch(ctx context.Context, arg []ReportConfigPlotConfigDeleteBatchParams) *ReportConfigPlotConfigDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ReportConfigID, + a.PlotConfigID, + } + batch.Queue(reportConfigPlotConfigDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &ReportConfigPlotConfigDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *ReportConfigPlotConfigDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *ReportConfigPlotConfigDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaOptsCreateBatch = `-- name: SaaOptsCreateBatch :batchexec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type SaaOptsCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaOptsCreateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) SaaOptsCreateBatch(ctx context.Context, arg []SaaOptsCreateBatchParams) *SaaOptsCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.NumSegments, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(saaOptsCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaOptsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaOptsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaOptsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaOptsUpdateBatch = `-- name: SaaOptsUpdateBatch :batchexec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type SaaOptsUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaOptsUpdateBatchParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) SaaOptsUpdateBatch(ctx context.Context, arg []SaaOptsUpdateBatchParams) *SaaOptsUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.BottomElevationTimeseriesID, + a.InitialTime, + } + batch.Queue(saaOptsUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaOptsUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaOptsUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaOptsUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaSegmentCreateBatch = `-- name: SaaSegmentCreateBatch :batchexec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type SaaSegmentCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaSegmentCreateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) SaaSegmentCreateBatch(ctx context.Context, arg []SaaSegmentCreateBatchParams) *SaaSegmentCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.XTimeseriesID, + a.YTimeseriesID, + a.ZTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(saaSegmentCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaSegmentCreateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaSegmentCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaSegmentCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const saaSegmentUpdateBatch = `-- name: SaaSegmentUpdateBatch :batchexec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2 +` + +type SaaSegmentUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type SaaSegmentUpdateBatchParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) SaaSegmentUpdateBatch(ctx context.Context, arg []SaaSegmentUpdateBatchParams) *SaaSegmentUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.InstrumentID, + a.LengthTimeseriesID, + a.XTimeseriesID, + a.YTimeseriesID, + a.ZTimeseriesID, + a.TempTimeseriesID, + } + batch.Queue(saaSegmentUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &SaaSegmentUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *SaaSegmentUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *SaaSegmentUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesCreateBatch = `-- name: TimeseriesCreateBatch :batchone +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type +` + +type TimeseriesCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesCreateBatchParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` +} + +type TimeseriesCreateBatchRow struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` +} + +func (q *Queries) TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.InstrumentID, + a.Name, + a.ParameterID, + a.UnitID, + a.Type, + } + batch.Queue(timeseriesCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesCreateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesCreateBatchBatchResults) QueryRow(f func(int, TimeseriesCreateBatchRow, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + var i TimeseriesCreateBatchRow + if b.closed { + if f != nil { + f(t, i, ErrBatchAlreadyClosed) + } + continue + } + row := b.br.QueryRow() + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Slug, + &i.Name, + &i.ParameterID, + &i.UnitID, + &i.Type, + ) + if f != nil { + f(t, i, err) + } + } +} + +func (b *TimeseriesCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesCwmsCreateBatch = `-- name: TimeseriesCwmsCreateBatch :batchexec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5) +` + +type TimeseriesCwmsCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesCwmsCreateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` +} + +func (q *Queries) TimeseriesCwmsCreateBatch(ctx context.Context, arg []TimeseriesCwmsCreateBatchParams) *TimeseriesCwmsCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.CwmsTimeseriesID, + a.CwmsOfficeID, + a.CwmsExtentEarliestTime, + a.CwmsExtentLatestTime, + } + batch.Queue(timeseriesCwmsCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesCwmsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesCwmsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesCwmsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesMeasurementCreateBatch = `-- name: TimeseriesMeasurementCreateBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing +` + +type TimeseriesMeasurementCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementCreateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Value, + } + batch.Queue(timeseriesMeasurementCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementCreateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesMeasurementCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) +values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Timezone string `json:"timezone"` + LocalTime pgtype.Timestamp `json:"local_time"` + Value float64 `json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Timezone, + a.LocalTime, + a.Value, + } + batch.Queue(timeseriesMeasurementCreateOrUpdateAtTimezoneBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesMeasurementCreateOrUpdateBatch = `-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type TimeseriesMeasurementCreateOrUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementCreateOrUpdateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Value, + } + batch.Queue(timeseriesMeasurementCreateOrUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementCreateOrUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementCreateOrUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesMeasurementDeleteBatch = `-- name: TimeseriesMeasurementDeleteBatch :batchexec +delete from timeseries_measurement where timeseries_id=$1 and time=$2 +` + +type TimeseriesMeasurementDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementDeleteBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + } + batch.Queue(timeseriesMeasurementDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesMeasurementDeleteRangeBatch = `-- name: TimeseriesMeasurementDeleteRangeBatch :batchexec +delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3 +` + +type TimeseriesMeasurementDeleteRangeBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesMeasurementDeleteRangeBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesMeasurementDeleteRangeBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteRangeBatchParams) *TimeseriesMeasurementDeleteRangeBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.After, + a.Before, + } + batch.Queue(timeseriesMeasurementDeleteRangeBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesMeasurementDeleteRangeBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesMeasurementDeleteRangeBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesMeasurementDeleteRangeBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesNoteCreateBatch = `-- name: TimeseriesNoteCreateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing +` + +type TimeseriesNoteCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteCreateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(timeseriesNoteCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteCreateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesNoteCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) +values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4, $5, $6) +on conflict on constraint notes_unique_time do nothing +` + +type TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Timezone string `json:"timezone"` + LocalTime pgtype.Timestamp `json:"local_time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Timezone, + a.LocalTime, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(timeseriesNoteCreateOrUpdateAtTimezoneBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesNoteCreateOrUpdateBatch = `-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation +` + +type TimeseriesNoteCreateOrUpdateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteCreateOrUpdateBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + a.Masked, + a.Validated, + a.Annotation, + } + batch.Queue(timeseriesNoteCreateOrUpdateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteCreateOrUpdateBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteCreateOrUpdateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesNoteDeleteBatch = `-- name: TimeseriesNoteDeleteBatch :batchexec +delete from timeseries_notes where timeseries_id=$1 and time=$2 +` + +type TimeseriesNoteDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteDeleteBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.Time, + } + batch.Queue(timeseriesNoteDeleteBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const timeseriesNoteDeleteRangeBatch = `-- name: TimeseriesNoteDeleteRangeBatch :batchexec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 +` + +type TimeseriesNoteDeleteRangeBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type TimeseriesNoteDeleteRangeBatchParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesNoteDeleteRangeBatch(ctx context.Context, arg []TimeseriesNoteDeleteRangeBatchParams) *TimeseriesNoteDeleteRangeBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.TimeseriesID, + a.After, + a.Before, + } + batch.Queue(timeseriesNoteDeleteRangeBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &TimeseriesNoteDeleteRangeBatchBatchResults{br, len(arg), false} +} + +func (b *TimeseriesNoteDeleteRangeBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *TimeseriesNoteDeleteRangeBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + +const uploaderConfigMappingCreateBatch = `-- name: UploaderConfigMappingCreateBatch :batchexec +insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3) +` + +type UploaderConfigMappingCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type UploaderConfigMappingCreateBatchParams struct { + UploaderConfigID uuid.UUID `json:"uploader_config_id"` + FieldName string `json:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.UploaderConfigID, + a.FieldName, + a.TimeseriesID, + } + batch.Queue(uploaderConfigMappingCreateBatch, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &UploaderConfigMappingCreateBatchBatchResults{br, len(arg), false} +} + +func (b *UploaderConfigMappingCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *UploaderConfigMappingCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} diff --git a/api/internal/db/collection_group.sql_gen.go b/api/internal/db/collection_group.sql_gen.go new file mode 100644 index 00000000..e7558afd --- /dev/null +++ b/api/internal/db/collection_group.sql_gen.go @@ -0,0 +1,205 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: collection_group.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const collectionGroupCreate = `-- name: CollectionGroupCreate :one +insert into collection_group (project_id, name, slug, created_by, created_at, sort_order) +values ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5) +returning id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order +` + +type CollectionGroupCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + SortOrder int32 `json:"sort_order"` +} + +func (q *Queries) CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, collectionGroupCreate, + arg.ProjectID, + arg.Name, + arg.CreatedBy, + arg.CreatedAt, + arg.SortOrder, + ) + var i CollectionGroup + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.SortOrder, + ) + return i, err +} + +const collectionGroupDelete = 
`-- name: CollectionGroupDelete :exec +delete from collection_group where project_id=$1 and id=$2 +` + +type CollectionGroupDeleteParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) CollectionGroupDelete(ctx context.Context, arg CollectionGroupDeleteParams) error { + _, err := q.db.Exec(ctx, collectionGroupDelete, arg.ProjectID, arg.ID) + return err +} + +const collectionGroupDetailsGet = `-- name: CollectionGroupDetailsGet :one +select id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order, timeseries from v_collection_group_details where id = $1 +` + +func (q *Queries) CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetails, error) { + row := q.db.QueryRow(ctx, collectionGroupDetailsGet, id) + var i VCollectionGroupDetails + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.SortOrder, + &i.Timeseries, + ) + return i, err +} + +const collectionGroupListForProject = `-- name: CollectionGroupListForProject :many +select id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order from collection_group where project_id = $1 +` + +func (q *Queries) CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) { + rows, err := q.db.Query(ctx, collectionGroupListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []CollectionGroup{} + for rows.Next() { + var i CollectionGroup + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.SortOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const collectionGroupTimeseriesCreate = `-- name: CollectionGroupTimeseriesCreate :exec 
+insert into collection_group_timeseries (collection_group_id, timeseries_id, sort_order) values ($1, $2, $3) +on conflict on constraint collection_group_unique_timeseries do nothing +` + +type CollectionGroupTimeseriesCreateParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + SortOrder int32 `json:"sort_order"` +} + +func (q *Queries) CollectionGroupTimeseriesCreate(ctx context.Context, arg CollectionGroupTimeseriesCreateParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesCreate, arg.CollectionGroupID, arg.TimeseriesID, arg.SortOrder) + return err +} + +const collectionGroupTimeseriesDelete = `-- name: CollectionGroupTimeseriesDelete :exec +delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2 +` + +type CollectionGroupTimeseriesDeleteParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) CollectionGroupTimeseriesDelete(ctx context.Context, arg CollectionGroupTimeseriesDeleteParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesDelete, arg.CollectionGroupID, arg.TimeseriesID) + return err +} + +const collectionGroupTimeseriesUpdateSortOrder = `-- name: CollectionGroupTimeseriesUpdateSortOrder :exec +update collection_group_timeseries set sort_order=$3 +where collection_group_id=$1 and timeseries_id=$2 +` + +type CollectionGroupTimeseriesUpdateSortOrderParams struct { + CollectionGroupID uuid.UUID `json:"collection_group_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + SortOrder int32 `json:"sort_order"` +} + +func (q *Queries) CollectionGroupTimeseriesUpdateSortOrder(ctx context.Context, arg CollectionGroupTimeseriesUpdateSortOrderParams) error { + _, err := q.db.Exec(ctx, collectionGroupTimeseriesUpdateSortOrder, arg.CollectionGroupID, arg.TimeseriesID, arg.SortOrder) + return err +} + +const collectionGroupUpdate = `-- name: 
CollectionGroupUpdate :one +update collection_group set name=$3, updated_by=$4, updated_at=$5, sort_order=$6 +where project_id=$1 and id=$2 +returning id, project_id, name, slug, created_by, created_at, updated_by, updated_at, sort_order +` + +type CollectionGroupUpdateParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + SortOrder int32 `json:"sort_order"` +} + +func (q *Queries) CollectionGroupUpdate(ctx context.Context, arg CollectionGroupUpdateParams) (CollectionGroup, error) { + row := q.db.QueryRow(ctx, collectionGroupUpdate, + arg.ProjectID, + arg.ID, + arg.Name, + arg.UpdatedBy, + arg.UpdatedAt, + arg.SortOrder, + ) + var i CollectionGroup + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.SortOrder, + ) + return i, err +} diff --git a/api/internal/db/datalogger.sql_gen.go b/api/internal/db/datalogger.sql_gen.go new file mode 100644 index 00000000..1757d9e3 --- /dev/null +++ b/api/internal/db/datalogger.sql_gen.go @@ -0,0 +1,339 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: datalogger.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const dataloggerCreate = `-- name: DataloggerCreate :one +insert into datalogger (name, sn, project_id, created_by, updated_by, slug, model_id) +values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) +returning id +` + +type DataloggerCreateParams struct { + Name string `json:"name"` + Sn string `json:"sn"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + ModelID uuid.UUID `json:"model_id"` +} + +func (q *Queries) DataloggerCreate(ctx context.Context, arg DataloggerCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, dataloggerCreate, + arg.Name, + arg.Sn, + arg.ProjectID, + arg.CreatedBy, + arg.ModelID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const dataloggerDelete = `-- name: DataloggerDelete :exec +update datalogger set deleted=true, updated_by=$2, updated_at=$3 where id=$1 +` + +type DataloggerDeleteParams struct { + ID uuid.UUID `json:"id"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) DataloggerDelete(ctx context.Context, arg DataloggerDeleteParams) error { + _, err := q.db.Exec(ctx, dataloggerDelete, arg.ID, arg.UpdatedBy, arg.UpdatedAt) + return err +} + +const dataloggerGet = `-- name: DataloggerGet :one +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger where id=$1 +` + +func (q *Queries) DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, error) { + row := q.db.QueryRow(ctx, dataloggerGet, id) + var i VDatalogger + err := row.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + 
&i.Tables, + ) + return i, err +} + +const dataloggerGetActive = `-- name: DataloggerGetActive :one +select exists (select true from v_datalogger where model=$1 and sn=$2) +` + +type DataloggerGetActiveParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) DataloggerGetActive(ctx context.Context, arg DataloggerGetActiveParams) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerGetActive, arg.Model, arg.Sn) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const dataloggerGetExists = `-- name: DataloggerGetExists :one +select true from v_datalogger where id=$1 +` + +func (q *Queries) DataloggerGetExists(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerGetExists, id) + var column_1 bool + err := row.Scan(&column_1) + return column_1, err +} + +const dataloggerGetModelName = `-- name: DataloggerGetModelName :one +select model from datalogger_model where id=$1 +` + +func (q *Queries) DataloggerGetModelName(ctx context.Context, id uuid.UUID) (*string, error) { + row := q.db.QueryRow(ctx, dataloggerGetModelName, id) + var model *string + err := row.Scan(&model) + return model, err +} + +const dataloggerHashCreate = `-- name: DataloggerHashCreate :exec +insert into datalogger_hash (datalogger_id, "hash") values ($1, $2) +` + +type DataloggerHashCreateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` +} + +func (q *Queries) DataloggerHashCreate(ctx context.Context, arg DataloggerHashCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerHashCreate, arg.DataloggerID, arg.Hash) + return err +} + +const dataloggerHashUpdate = `-- name: DataloggerHashUpdate :exec +update datalogger_hash set "hash"=$2 where datalogger_id=$1 +` + +type DataloggerHashUpdateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + Hash string `json:"hash"` +} + +func (q *Queries) DataloggerHashUpdate(ctx context.Context, arg 
DataloggerHashUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerHashUpdate, arg.DataloggerID, arg.Hash) + return err +} + +const dataloggerList = `-- name: DataloggerList :many +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger +` + +func (q *Queries) DataloggerList(ctx context.Context) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, dataloggerList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDatalogger{} + for rows.Next() { + var i VDatalogger + if err := rows.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const dataloggerListForProject = `-- name: DataloggerListForProject :many +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger where project_id=$1 +` + +func (q *Queries) DataloggerListForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) { + rows, err := q.db.Query(ctx, dataloggerListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDatalogger{} + for rows.Next() { + var i VDatalogger + if err := rows.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, 
nil +} + +const dataloggerTableDelete = `-- name: DataloggerTableDelete :exec +delete from datalogger_table where id=$1 +` + +func (q *Queries) DataloggerTableDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, dataloggerTableDelete, id) + return err +} + +const dataloggerTableGetOrCreate = `-- name: DataloggerTableGetOrCreate :one +with new_datalogger_table as ( + insert into datalogger_table (datalogger_id, table_name) values ($1, $2) + on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing + returning id +) +select ndt.id from new_datalogger_table ndt +union +select sdt.id from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 +` + +type DataloggerTableGetOrCreateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) DataloggerTableGetOrCreate(ctx context.Context, arg DataloggerTableGetOrCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, dataloggerTableGetOrCreate, arg.DataloggerID, arg.TableName) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const dataloggerTablePreviewGet = `-- name: DataloggerTablePreviewGet :one +select datalogger_table_id, preview, updated_at from v_datalogger_preview where datalogger_table_id=$1 limit 1 +` + +func (q *Queries) DataloggerTablePreviewGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) { + row := q.db.QueryRow(ctx, dataloggerTablePreviewGet, dataloggerTableID) + var i VDataloggerPreview + err := row.Scan(&i.DataloggerTableID, &i.Preview, &i.UpdatedAt) + return i, err +} + +const dataloggerTableUpdateNameIfEmpty = `-- name: DataloggerTableUpdateNameIfEmpty :exec +update datalogger_table dt +set table_name=$2 +where dt.table_name='' and dt.datalogger_id=$1 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 +) +` + +type DataloggerTableUpdateNameIfEmptyParams struct { + 
DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg DataloggerTableUpdateNameIfEmptyParams) error { + _, err := q.db.Exec(ctx, dataloggerTableUpdateNameIfEmpty, arg.DataloggerID, arg.TableName) + return err +} + +const dataloggerUpdate = `-- name: DataloggerUpdate :exec +update datalogger set + name=$2, + updated_by=$3, + updated_at=$4 +where id=$1 +` + +type DataloggerUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerUpdate, + arg.ID, + arg.Name, + arg.UpdatedBy, + arg.UpdatedAt, + ) + return err +} + +const dataloggerUpdateAuditInfo = `-- name: DataloggerUpdateAuditInfo :exec +update datalogger set updated_by=$2, updated_at=$3 where id=$1 +` + +type DataloggerUpdateAuditInfoParams struct { + ID uuid.UUID `json:"id"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) DataloggerUpdateAuditInfo(ctx context.Context, arg DataloggerUpdateAuditInfoParams) error { + _, err := q.db.Exec(ctx, dataloggerUpdateAuditInfo, arg.ID, arg.UpdatedBy, arg.UpdatedAt) + return err +} + +const dataloggerUpdateTableNameBlank = `-- name: DataloggerUpdateTableNameBlank :exec +update datalogger_table set table_name='' where id=$1 +` + +func (q *Queries) DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, dataloggerUpdateTableNameBlank, id) + return err +} diff --git a/api/internal/db/datalogger_telemetry.sql_gen.go b/api/internal/db/datalogger_telemetry.sql_gen.go new file mode 100644 index 00000000..4d3f7f0a --- /dev/null +++ b/api/internal/db/datalogger_telemetry.sql_gen.go @@ -0,0 +1,137 @@ +// Code generated by sqlc. 
DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: datalogger_telemetry.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const dataloggerErrorCreate = `-- name: DataloggerErrorCreate :exec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +) +` + +type DataloggerErrorCreateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + ErrorMessage *string `json:"error_message"` +} + +func (q *Queries) DataloggerErrorCreate(ctx context.Context, arg DataloggerErrorCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerErrorCreate, arg.DataloggerID, arg.TableName, arg.ErrorMessage) + return err +} + +const dataloggerErrorDelete = `-- name: DataloggerErrorDelete :exec +delete from datalogger_error +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +` + +type DataloggerErrorDeleteParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` +} + +func (q *Queries) DataloggerErrorDelete(ctx context.Context, arg DataloggerErrorDeleteParams) error { + _, err := q.db.Exec(ctx, dataloggerErrorDelete, arg.DataloggerID, arg.TableName) + return err +} + +const dataloggerGetForModelSn = `-- name: DataloggerGetForModelSn :one +select id, sn, project_id, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, name, slug, model_id, model, errors, tables from v_datalogger +where model = $1 and sn = $2 +limit 1 +` + +type DataloggerGetForModelSnParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) DataloggerGetForModelSn(ctx context.Context, arg DataloggerGetForModelSnParams) (VDatalogger, error) 
{ + row := q.db.QueryRow(ctx, dataloggerGetForModelSn, arg.Model, arg.Sn) + var i VDatalogger + err := row.Scan( + &i.ID, + &i.Sn, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.Name, + &i.Slug, + &i.ModelID, + &i.Model, + &i.Errors, + &i.Tables, + ) + return i, err +} + +const dataloggerHashGetForModelSn = `-- name: DataloggerHashGetForModelSn :one +select "hash" from v_datalogger_hash +where model = $1 and sn = $2 +limit 1 +` + +type DataloggerHashGetForModelSnParams struct { + Model *string `json:"model"` + Sn string `json:"sn"` +} + +func (q *Queries) DataloggerHashGetForModelSn(ctx context.Context, arg DataloggerHashGetForModelSnParams) (string, error) { + row := q.db.QueryRow(ctx, dataloggerHashGetForModelSn, arg.Model, arg.Sn) + var hash string + err := row.Scan(&hash) + return hash, err +} + +const dataloggerTablePreviewCreate = `-- name: DataloggerTablePreviewCreate :exec +insert into datalogger_preview (datalogger_table_id, preview, updated_at) values ($1, $2, $3) +` + +type DataloggerTablePreviewCreateParams struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id"` + Preview []byte `json:"preview"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (q *Queries) DataloggerTablePreviewCreate(ctx context.Context, arg DataloggerTablePreviewCreateParams) error { + _, err := q.db.Exec(ctx, dataloggerTablePreviewCreate, arg.DataloggerTableID, arg.Preview, arg.UpdatedAt) + return err +} + +const dataloggerTablePreviewUpdate = `-- name: DataloggerTablePreviewUpdate :exec +update datalogger_preview set preview = $3, updated_at = $4 +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2) +` + +type DataloggerTablePreviewUpdateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + TableName string `json:"table_name"` + Preview []byte `json:"preview"` + UpdatedAt time.Time `json:"updated_at"` 
+} + +func (q *Queries) DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error { + _, err := q.db.Exec(ctx, dataloggerTablePreviewUpdate, + arg.DataloggerID, + arg.TableName, + arg.Preview, + arg.UpdatedAt, + ) + return err +} diff --git a/api/internal/db/db.go b/api/internal/db/db.go new file mode 100644 index 00000000..8c84b4d7 --- /dev/null +++ b/api/internal/db/db.go @@ -0,0 +1,33 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 + +package db + +import ( + "context" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" +) + +type DBTX interface { + Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error) + Query(context.Context, string, ...interface{}) (pgx.Rows, error) + QueryRow(context.Context, string, ...interface{}) pgx.Row + SendBatch(context.Context, *pgx.Batch) pgx.BatchResults +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx pgx.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/api/internal/db/district_rollup.sql_gen.go b/api/internal/db/district_rollup.sql_gen.go new file mode 100644 index 00000000..a322db89 --- /dev/null +++ b/api/internal/db/district_rollup.sql_gen.go @@ -0,0 +1,105 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: district_rollup.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const districtRollupListEvaluationForProjectAlertConfig = `-- name: DistrictRollupListEvaluationForProjectAlertConfig :many +select alert_type_id, office_id, district_initials, project_name, project_id, month, expected_total_submittals, actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +and project_id=$1 +and "month" >= date_trunc('month', $2::timestamptz) +and "month" <= date_trunc('month', $3::timestamptz) +` + +type DistrictRollupListEvaluationForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + StartMonthTime time.Time `json:"start_month_time"` + EndMonthTime time.Time `json:"end_month_time"` +} + +func (q *Queries) DistrictRollupListEvaluationForProjectAlertConfig(ctx context.Context, arg DistrictRollupListEvaluationForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, districtRollupListEvaluationForProjectAlertConfig, arg.ProjectID, arg.StartMonthTime, arg.EndMonthTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrictRollup{} + for rows.Next() { + var i VDistrictRollup + if err := rows.Scan( + &i.AlertTypeID, + &i.OfficeID, + &i.DistrictInitials, + &i.ProjectName, + &i.ProjectID, + &i.Month, + &i.ExpectedTotalSubmittals, + &i.ActualTotalSubmittals, + &i.RedSubmittals, + &i.YellowSubmittals, + &i.GreenSubmittals, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const districtRollupListMeasurementForProjectAlertConfig = `-- name: DistrictRollupListMeasurementForProjectAlertConfig :many +select alert_type_id, office_id, district_initials, project_name, project_id, month, expected_total_submittals, 
actual_total_submittals, red_submittals, yellow_submittals, green_submittals from v_district_rollup +where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid +and project_id=$1 +and "month" >= date_trunc('month', $2::timestamptz) +and "month" <= date_trunc('month', $3::timestamptz) +` + +type DistrictRollupListMeasurementForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + StartMonthTime time.Time `json:"start_month_time"` + EndMonthTime time.Time `json:"end_month_time"` +} + +func (q *Queries) DistrictRollupListMeasurementForProjectAlertConfig(ctx context.Context, arg DistrictRollupListMeasurementForProjectAlertConfigParams) ([]VDistrictRollup, error) { + rows, err := q.db.Query(ctx, districtRollupListMeasurementForProjectAlertConfig, arg.ProjectID, arg.StartMonthTime, arg.EndMonthTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrictRollup{} + for rows.Next() { + var i VDistrictRollup + if err := rows.Scan( + &i.AlertTypeID, + &i.OfficeID, + &i.DistrictInitials, + &i.ProjectName, + &i.ProjectID, + &i.Month, + &i.ExpectedTotalSubmittals, + &i.ActualTotalSubmittals, + &i.RedSubmittals, + &i.YellowSubmittals, + &i.GreenSubmittals, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/domain.sql_gen.go b/api/internal/db/domain.sql_gen.go new file mode 100644 index 00000000..382dce78 --- /dev/null +++ b/api/internal/db/domain.sql_gen.go @@ -0,0 +1,99 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: domain.sql + +package db + +import ( + "context" +) + +const domainGroupList = `-- name: DomainGroupList :many +select "group", opts from v_domain_group +` + +func (q *Queries) DomainGroupList(ctx context.Context) ([]VDomainGroup, error) { + rows, err := q.db.Query(ctx, domainGroupList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomainGroup{} + for rows.Next() { + var i VDomainGroup + if err := rows.Scan(&i.Group, &i.Opts); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const domainList = `-- name: DomainList :many +select id, "group", value, description from v_domain +` + +func (q *Queries) DomainList(ctx context.Context) ([]VDomain, error) { + rows, err := q.db.Query(ctx, domainList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDomain{} + for rows.Next() { + var i VDomain + if err := rows.Scan( + &i.ID, + &i.Group, + &i.Value, + &i.Description, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const pgTimezoneNamesList = `-- name: PgTimezoneNamesList :many +select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names +` + +type PgTimezoneNamesListRow struct { + Name *string `json:"name"` + Abbrev *string `json:"abbrev"` + UtcOffset string `json:"utc_offset"` + IsDst *bool `json:"is_dst"` +} + +func (q *Queries) PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error) { + rows, err := q.db.Query(ctx, pgTimezoneNamesList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []PgTimezoneNamesListRow{} + for rows.Next() { + var i PgTimezoneNamesListRow + if err := rows.Scan( + &i.Name, + &i.Abbrev, + &i.UtcOffset, + &i.IsDst, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if 
err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/equivalency_table.sql_gen.go b/api/internal/db/equivalency_table.sql_gen.go new file mode 100644 index 00000000..7f74b979 --- /dev/null +++ b/api/internal/db/equivalency_table.sql_gen.go @@ -0,0 +1,137 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: equivalency_table.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const dataloggerTableGetIsValid = `-- name: DataloggerTableGetIsValid :one +select not exists (select id, datalogger_id, table_name from datalogger_table where id = $1 and table_name = 'preparse') +` + +func (q *Queries) DataloggerTableGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, dataloggerTableGetIsValid, id) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const equivalencyTableCreateOrUpdate = `-- name: EquivalencyTableCreateOrUpdate :exec +insert into datalogger_equivalency_table +(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) +values ($1, $2, $3, $4, $5, $6) +on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key +do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id +` + +type EquivalencyTableCreateOrUpdateParams struct { + DataloggerID uuid.UUID `json:"datalogger_id"` + DataloggerTableID *uuid.UUID `json:"datalogger_table_id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) EquivalencyTableCreateOrUpdate(ctx context.Context, arg EquivalencyTableCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, equivalencyTableCreateOrUpdate, + arg.DataloggerID, + arg.DataloggerTableID, + arg.FieldName, 
+ arg.DisplayName, + arg.InstrumentID, + arg.TimeseriesID, + ) + return err +} + +const equivalencyTableDelete = `-- name: EquivalencyTableDelete :exec +delete from datalogger_equivalency_table where id = $1 +` + +func (q *Queries) EquivalencyTableDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, equivalencyTableDelete, id) + return err +} + +const equivalencyTableDeleteForDataloggerTable = `-- name: EquivalencyTableDeleteForDataloggerTable :exec +delete from datalogger_equivalency_table where datalogger_table_id = $1 +` + +func (q *Queries) EquivalencyTableDeleteForDataloggerTable(ctx context.Context, dataloggerTableID *uuid.UUID) error { + _, err := q.db.Exec(ctx, equivalencyTableDeleteForDataloggerTable, dataloggerTableID) + return err +} + +const equivalencyTableGet = `-- name: EquivalencyTableGet :one +select + datalogger_id, + datalogger_table_id, + datalogger_table_name, + fields +from v_datalogger_equivalency_table +where datalogger_table_id = $1 +` + +func (q *Queries) EquivalencyTableGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) { + row := q.db.QueryRow(ctx, equivalencyTableGet, dataloggerTableID) + var i VDataloggerEquivalencyTable + err := row.Scan( + &i.DataloggerID, + &i.DataloggerTableID, + &i.DataloggerTableName, + &i.Fields, + ) + return i, err +} + +const equivalencyTableTimeseriesGetIsValid = `-- name: EquivalencyTableTimeseriesGetIsValid :one +select not exists ( + select id from v_timeseries_computed + where id = $1 + union all + select timeseries_id from instrument_constants + where timeseries_id = $1 +) +` + +func (q *Queries) EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, equivalencyTableTimeseriesGetIsValid, id) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const equivalencyTableUpdate = `-- name: EquivalencyTableUpdate :exec +update datalogger_equivalency_table set + 
field_name = $2, + display_name = $3, + instrument_id = $4, + timeseries_id = $5 +where id = $1 +` + +type EquivalencyTableUpdateParams struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName *string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error { + _, err := q.db.Exec(ctx, equivalencyTableUpdate, + arg.ID, + arg.FieldName, + arg.DisplayName, + arg.InstrumentID, + arg.TimeseriesID, + ) + return err +} diff --git a/api/internal/db/evaluation.sql_gen.go b/api/internal/db/evaluation.sql_gen.go new file mode 100644 index 00000000..c1747d7f --- /dev/null +++ b/api/internal/db/evaluation.sql_gen.go @@ -0,0 +1,346 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: evaluation.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const evaluationCreate = `-- name: EvaluationCreate :one +insert into evaluation ( + project_id, + submittal_id, + name, + body, + started_at, + ended_at, + created_by, + created_at +) values ($1,$2,$3,$4,$5,$6,$7,$8) +returning id +` + +type EvaluationCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + SubmittalID *uuid.UUID `json:"submittal_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +func (q *Queries) EvaluationCreate(ctx context.Context, arg EvaluationCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, evaluationCreate, + arg.ProjectID, + arg.SubmittalID, + arg.Name, + arg.Body, + arg.StartedAt, + arg.EndedAt, + arg.CreatedBy, + arg.CreatedAt, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const evaluationDelete = `-- name: 
EvaluationDelete :exec +delete from evaluation where id=$1 +` + +func (q *Queries) EvaluationDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, evaluationDelete, id) + return err +} + +const evaluationGet = `-- name: EvaluationGet :one +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation where id=$1 +` + +func (q *Queries) EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error) { + row := q.db.QueryRow(ctx, evaluationGet, id) + var i VEvaluation + err := row.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartedAt, + &i.EndedAt, + &i.Instruments, + ) + return i, err +} + +const evaluationInstrumentCreate = `-- name: EvaluationInstrumentCreate :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2) +` + +type EvaluationInstrumentCreateParams struct { + EvaluationID *uuid.UUID `json:"evaluation_id"` + InstrumentID *uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) EvaluationInstrumentCreate(ctx context.Context, arg EvaluationInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, evaluationInstrumentCreate, arg.EvaluationID, arg.InstrumentID) + return err +} + +const evaluationInstrumentDeleteForEvaluation = `-- name: EvaluationInstrumentDeleteForEvaluation :exec +delete from evaluation_instrument where evaluation_id=$1 +` + +func (q *Queries) EvaluationInstrumentDeleteForEvaluation(ctx context.Context, evaluationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, evaluationInstrumentDeleteForEvaluation, evaluationID) + return err +} + +const evaluationListForInstrument = `-- name: 
EvaluationListForInstrument :many +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation +where id = any( + select evaluation_id + from evaluation_instrument + where instrument_id=$1 +) +` + +func (q *Queries) EvaluationListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartedAt, + &i.EndedAt, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const evaluationListForProject = `-- name: EvaluationListForProject :many +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments +from v_evaluation +where project_id=$1 +` + +func (q *Queries) EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + 
&i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartedAt, + &i.EndedAt, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const evaluationListForProjectAlertConfig = `-- name: EvaluationListForProjectAlertConfig :many +select id, name, body, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, project_id, project_name, alert_config_id, alert_config_name, submittal_id, started_at, ended_at, instruments from v_evaluation +where project_id=$1 +and alert_config_id is not null +and alert_config_id=$2 +` + +type EvaluationListForProjectAlertConfigParams struct { + ProjectID uuid.UUID `json:"project_id"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` +} + +func (q *Queries) EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) { + rows, err := q.db.Query(ctx, evaluationListForProjectAlertConfig, arg.ProjectID, arg.AlertConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VEvaluation{} + for rows.Next() { + var i VEvaluation + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Body, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.ProjectID, + &i.ProjectName, + &i.AlertConfigID, + &i.AlertConfigName, + &i.SubmittalID, + &i.StartedAt, + &i.EndedAt, + &i.Instruments, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const evaluationUpdate = `-- name: EvaluationUpdate :exec +update evaluation set + name=$3, + body=$4, + started_at=$5, + ended_at=$6, + updated_by=$7, + updated_at=$8 +where id=$1 and project_id=$2 +` + +type EvaluationUpdateParams struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID 
`json:"project_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error { + _, err := q.db.Exec(ctx, evaluationUpdate, + arg.ID, + arg.ProjectID, + arg.Name, + arg.Body, + arg.StartedAt, + arg.EndedAt, + arg.UpdatedBy, + arg.UpdatedAt, + ) + return err +} + +const submittalCreateNextEvaluation = `-- name: SubmittalCreateNextEvaluation :exec +insert into submittal (alert_config_id, due_at) +select + ac.id, + now() + ac.schedule_interval +from alert_config ac +where ac.id = any(select sub.alert_config_id from submittal sub where sub.id=$1) +` + +func (q *Queries) SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalCreateNextEvaluation, id) + return err +} + +const submittalUpdateCompleteEvaluation = `-- name: SubmittalUpdateCompleteEvaluation :one +update submittal sub1 set + submittal_status_id = sq.submittal_status_id, + completed_at = now() +from ( + select + sub2.id as submittal_id, + case + -- if completed before due date, mark submittal as green id + when now() <= sub2.due_at then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + -- if completed after due date, mark as yellow + else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid + end as submittal_status_id + from submittal sub2 + inner join alert_config ac on sub2.alert_config_id = ac.id + where sub2.id=$1 + and sub2.completed_at is null + and not sub2.marked_as_missing + and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +) sq +where sub1.id = sq.submittal_id +returning sub1.id, sub1.alert_config_id, sub1.submittal_status_id, sub1.completed_at, sub1.created_at, sub1.due_at, sub1.marked_as_missing, sub1.warning_sent +` + +func (q *Queries) SubmittalUpdateCompleteEvaluation(ctx 
context.Context, id uuid.UUID) (Submittal, error) { + row := q.db.QueryRow(ctx, submittalUpdateCompleteEvaluation, id) + var i Submittal + err := row.Scan( + &i.ID, + &i.AlertConfigID, + &i.SubmittalStatusID, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, + &i.MarkedAsMissing, + &i.WarningSent, + ) + return i, err +} diff --git a/api/internal/db/heartbeat.sql_gen.go b/api/internal/db/heartbeat.sql_gen.go new file mode 100644 index 00000000..6a731eb0 --- /dev/null +++ b/api/internal/db/heartbeat.sql_gen.go @@ -0,0 +1,59 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: heartbeat.sql + +package db + +import ( + "context" + "time" +) + +const heartbeatCreate = `-- name: HeartbeatCreate :one +insert into heartbeat (time) values ($1) returning time +` + +func (q *Queries) HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) { + row := q.db.QueryRow(ctx, heartbeatCreate, argTime) + var time time.Time + err := row.Scan(&time) + return time, err +} + +const heartbeatGetLatest = `-- name: HeartbeatGetLatest :one +select max(time)::timestamptz from heartbeat +` + +func (q *Queries) HeartbeatGetLatest(ctx context.Context) (time.Time, error) { + row := q.db.QueryRow(ctx, heartbeatGetLatest) + var column_1 time.Time + err := row.Scan(&column_1) + return column_1, err +} + +const heartbeatList = `-- name: HeartbeatList :many +select time from heartbeat +order by time desc +limit $1 +` + +func (q *Queries) HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error) { + rows, err := q.db.Query(ctx, heartbeatList, resultLimit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []time.Time{} + for rows.Next() { + var time time.Time + if err := rows.Scan(&time); err != nil { + return nil, err + } + items = append(items, time) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/home.sql_gen.go b/api/internal/db/home.sql_gen.go new 
file mode 100644 index 00000000..d6d94bdb --- /dev/null +++ b/api/internal/db/home.sql_gen.go @@ -0,0 +1,40 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: home.sql + +package db + +import ( + "context" +) + +const homeGet = `-- name: HomeGet :one +select + (select count(*) from instrument where not deleted) as instrument_count, + (select count(*) from project where not deleted) as project_count, + (select count(*) from instrument_group) as instrument_group_count, + (select count(*) from instrument where not deleted and created_at > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h +` + +type HomeGetRow struct { + InstrumentCount int64 `json:"instrument_count"` + ProjectCount int64 `json:"project_count"` + InstrumentGroupCount int64 `json:"instrument_group_count"` + NewInstruments7d int64 `json:"new_instruments_7d"` + NewMeasurements2h int64 `json:"new_measurements_2h"` +} + +func (q *Queries) HomeGet(ctx context.Context) (HomeGetRow, error) { + row := q.db.QueryRow(ctx, homeGet) + var i HomeGetRow + err := row.Scan( + &i.InstrumentCount, + &i.ProjectCount, + &i.InstrumentGroupCount, + &i.NewInstruments7d, + &i.NewMeasurements2h, + ) + return i, err +} diff --git a/api/internal/db/instrument.sql_gen.go b/api/internal/db/instrument.sql_gen.go new file mode 100644 index 00000000..01c23c09 --- /dev/null +++ b/api/internal/db/instrument.sql_gen.go @@ -0,0 +1,422 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const instrumentCreate = `-- name: InstrumentCreate :one +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify($1, 'instrument'), + $1, + $2, + ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + $4, + $5, + $6, + $7, + $8, + $9, + $10 +) returning id, slug +` + +type InstrumentCreateParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry []byte `json:"geometry"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` +} + +type InstrumentCreateRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +func (q *Queries) InstrumentCreate(ctx context.Context, arg InstrumentCreateParams) (InstrumentCreateRow, error) { + row := q.db.QueryRow(ctx, instrumentCreate, + arg.Name, + arg.TypeID, + arg.Geometry, + arg.Station, + arg.StationOffset, + arg.CreatedBy, + arg.CreatedAt, + arg.NidID, + arg.UsgsID, + arg.ShowCwmsTab, + ) + var i InstrumentCreateRow + err := row.Scan(&i.ID, &i.Slug) + return i, err +} + +const instrumentDeleteFlag = `-- name: InstrumentDeleteFlag :exec +update instrument set deleted=true +where id = any( + select instrument_id + from project_instrument + where project_id = $1 +) +and id = $2 +` + +type InstrumentDeleteFlagParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error { + _, err := q.db.Exec(ctx, instrumentDeleteFlag, arg.ProjectID, arg.ID) + return err +} + +const instrumentGet = `-- name: InstrumentGet :one 
+select id, status_id, status, status_time, slug, name, type_id, show_cwms_tab, type, icon, geometry, station, "offset", created_by, created_at, updated_by, updated_at, nid_id, usgs_id, telemetry, has_cwms, projects, constants, groups, alert_configs, opts +from v_instrument +where id = $1 +` + +func (q *Queries) InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) { + row := q.db.QueryRow(ctx, instrumentGet, id) + var i VInstrument + err := row.Scan( + &i.ID, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ) + return i, err +} + +const instrumentGetCount = `-- name: InstrumentGetCount :one +select count(*) from instrument where not deleted +` + +func (q *Queries) InstrumentGetCount(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, instrumentGetCount) + var count int64 + err := row.Scan(&count) + return count, err +} + +const instrumentIDNameListByIDs = `-- name: InstrumentIDNameListByIDs :many +select id, name +from instrument +where id = any($1::uuid[]) +and not deleted +` + +type InstrumentIDNameListByIDsRow struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +func (q *Queries) InstrumentIDNameListByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]InstrumentIDNameListByIDsRow, error) { + rows, err := q.db.Query(ctx, instrumentIDNameListByIDs, instrumentIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []InstrumentIDNameListByIDsRow{} + for rows.Next() { + var i InstrumentIDNameListByIDsRow + if err := rows.Scan(&i.ID, &i.Name); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + 
+const instrumentListForInstrumentGroup = `-- name: InstrumentListForInstrumentGroup :many +select i.id, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i."offset", i.created_by, i.created_at, i.updated_by, i.updated_at, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +from v_instrument i +inner join instrument_group_instruments igi on igi.instrument_id = i.id +where instrument_group_id = $1 +` + +func (q *Queries) InstrumentListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VInstrument, error) { + rows, err := q.db.Query(ctx, instrumentListForInstrumentGroup, instrumentGroupID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrument{} + for rows.Next() { + var i VInstrument + if err := rows.Scan( + &i.ID, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const instrumentListForProject = `-- name: InstrumentListForProject :many +select i.id, i.status_id, i.status, i.status_time, i.slug, i.name, i.type_id, i.show_cwms_tab, i.type, i.icon, i.geometry, i.station, i."offset", i.created_by, i.created_at, i.updated_by, i.updated_at, i.nid_id, i.usgs_id, i.telemetry, i.has_cwms, i.projects, i.constants, i.groups, i.alert_configs, i.opts +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1 +` + +func (q *Queries) InstrumentListForProject(ctx context.Context, projectID uuid.UUID) 
([]VInstrument, error) { + rows, err := q.db.Query(ctx, instrumentListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrument{} + for rows.Next() { + var i VInstrument + if err := rows.Scan( + &i.ID, + &i.StatusID, + &i.Status, + &i.StatusTime, + &i.Slug, + &i.Name, + &i.TypeID, + &i.ShowCwmsTab, + &i.Type, + &i.Icon, + &i.Geometry, + &i.Station, + &i.Offset, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.NidID, + &i.UsgsID, + &i.Telemetry, + &i.HasCwms, + &i.Projects, + &i.Constants, + &i.Groups, + &i.AlertConfigs, + &i.Opts, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const instrumentUpdate = `-- name: InstrumentUpdate :exec +update instrument set + name=$1, + type_id=$2, + geometry=ST_SetSRID(ST_GeomFromGeoJSON($3::json), 4326), + updated_by=$4, + updated_at=$5, + station=$6, + station_offset=$7, + nid_id=$8, + usgs_id=$9, + show_cwms_tab=$10 +where id = $11 +and id = any( + select instrument_id + from project_instrument + where project_id = $12 +) +` + +type InstrumentUpdateParams struct { + Name string `json:"name"` + TypeID uuid.UUID `json:"type_id"` + Geometry []byte `json:"geometry"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"station_offset"` + NidID *string `json:"nid_id"` + UsgsID *string `json:"usgs_id"` + ShowCwmsTab bool `json:"show_cwms_tab"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentUpdate(ctx context.Context, arg InstrumentUpdateParams) error { + _, err := q.db.Exec(ctx, instrumentUpdate, + arg.Name, + arg.TypeID, + arg.Geometry, + arg.UpdatedBy, + arg.UpdatedAt, + arg.Station, + arg.StationOffset, + arg.NidID, + arg.UsgsID, + arg.ShowCwmsTab, + arg.ID, + arg.ProjectID, + ) + return err +} + +const 
instrumentUpdateGeometry = `-- name: InstrumentUpdateGeometry :one +update instrument set + geometry=ST_SetSRID(ST_GeomFromGeoJSON($1::json), 4326), + updated_by=$2, + updated_at=now() +where id = $3 +and id = any( + select instrument_id + from project_instrument + where project_id = $4 +) +returning id +` + +type InstrumentUpdateGeometryParams struct { + Geometry []byte `json:"geometry"` + UpdatedBy *uuid.UUID `json:"updated_by"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentUpdateGeometry(ctx context.Context, arg InstrumentUpdateGeometryParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, instrumentUpdateGeometry, + arg.Geometry, + arg.UpdatedBy, + arg.ID, + arg.ProjectID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const projectInstrumentListCountByInstrument = `-- name: ProjectInstrumentListCountByInstrument :many +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any($1::uuid[]) +group by pi.instrument_id, i.name +order by i.name +` + +type ProjectInstrumentListCountByInstrumentRow struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int64 `json:"project_count"` +} + +func (q *Queries) ProjectInstrumentListCountByInstrument(ctx context.Context, instrumentIds []uuid.UUID) ([]ProjectInstrumentListCountByInstrumentRow, error) { + rows, err := q.db.Query(ctx, projectInstrumentListCountByInstrument, instrumentIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProjectInstrumentListCountByInstrumentRow{} + for rows.Next() { + var i ProjectInstrumentListCountByInstrumentRow + if err := rows.Scan(&i.InstrumentID, &i.InstrumentName, &i.ProjectCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + 
return nil, err + } + return items, nil +} + +const projectInstrumentListProjectIDForInstrument = `-- name: ProjectInstrumentListProjectIDForInstrument :many +select project_id from project_instrument where instrument_id = $1 +` + +func (q *Queries) ProjectInstrumentListProjectIDForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, projectInstrumentListProjectIDForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_assign.sql_gen.go b/api/internal/db/instrument_assign.sql_gen.go new file mode 100644 index 00000000..2120ab24 --- /dev/null +++ b/api/internal/db/instrument_assign.sql_gen.go @@ -0,0 +1,197 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_assign.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const projectInstrumentCreate = `-- name: ProjectInstrumentCreate :exec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing +` + +type ProjectInstrumentCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentCreate(ctx context.Context, arg ProjectInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, projectInstrumentCreate, arg.ProjectID, arg.InstrumentID) + return err +} + +const projectInstrumentDelete = `-- name: ProjectInstrumentDelete :exec +delete from project_instrument where project_id = $1 and instrument_id = $2 +` + +type ProjectInstrumentDeleteParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) ProjectInstrumentDelete(ctx context.Context, arg ProjectInstrumentDeleteParams) error { + _, err := q.db.Exec(ctx, projectInstrumentDelete, arg.ProjectID, arg.InstrumentID) + return err +} + +const projectInstrumentListForInstrumentNameProjects = `-- name: ProjectInstrumentListForInstrumentNameProjects :many +select i.name instrument_name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +inner join project p on pi.project_id = p.id +where i.name = $1 +and pi.project_id = any($2::uuid[]) +and not i.deleted +order by pi.project_id +` + +type ProjectInstrumentListForInstrumentNameProjectsParams struct { + InstrumentName string `json:"instrument_name"` + ProjectIds []uuid.UUID `json:"project_ids"` +} + +func (q *Queries) ProjectInstrumentListForInstrumentNameProjects(ctx context.Context, arg ProjectInstrumentListForInstrumentNameProjectsParams) ([]string, error) { + rows, err := q.db.Query(ctx, 
projectInstrumentListForInstrumentNameProjects, arg.InstrumentName, arg.ProjectIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var instrument_name string + if err := rows.Scan(&instrument_name); err != nil { + return nil, err + } + items = append(items, instrument_name) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectInstrumentListForInstrumentProjectsProfileAdmin = `-- name: ProjectInstrumentListForInstrumentProjectsProfileAdmin :many +select p.name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = $1 +and pi.project_id = any($2::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where profile_id = $3 + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +order by p.name +` + +type ProjectInstrumentListForInstrumentProjectsProfileAdminParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ProjectIds []uuid.UUID `json:"project_ids"` + ProfileID uuid.UUID `json:"profile_id"` +} + +func (q *Queries) ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentProjectsProfileAdminParams) ([]string, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForInstrumentProjectsProfileAdmin, arg.InstrumentID, arg.ProjectIds, arg.ProfileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var name string + if err := rows.Scan(&name); err != nil { + return nil, err + } + items = append(items, name) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectInstrumentListForInstrumentsProfileAdmin = `-- name: ProjectInstrumentListForInstrumentsProfileAdmin :many +select p.name as project_name, i.name as instrument_name +from 
project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any($1::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where ppr.profile_id = $2 + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +` + +type ProjectInstrumentListForInstrumentsProfileAdminParams struct { + InstrumentIds []uuid.UUID `json:"instrument_ids"` + ProfileID uuid.UUID `json:"profile_id"` +} + +type ProjectInstrumentListForInstrumentsProfileAdminRow struct { + ProjectName string `json:"project_name"` + InstrumentName string `json:"instrument_name"` +} + +func (q *Queries) ProjectInstrumentListForInstrumentsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentsProfileAdminParams) ([]ProjectInstrumentListForInstrumentsProfileAdminRow, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForInstrumentsProfileAdmin, arg.InstrumentIds, arg.ProfileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProjectInstrumentListForInstrumentsProfileAdminRow{} + for rows.Next() { + var i ProjectInstrumentListForInstrumentsProfileAdminRow + if err := rows.Scan(&i.ProjectName, &i.InstrumentName); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectInstrumentListForProjectInstrumentNames = `-- name: ProjectInstrumentListForProjectInstrumentNames :many +select i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.project_id = $1 +and i.name = any($2::text[]) +and not i.deleted +` + +type ProjectInstrumentListForProjectInstrumentNamesParams struct { + ProjectID uuid.UUID `json:"project_id"` + InstrumentNames []string `json:"instrument_names"` +} + +func (q *Queries) ProjectInstrumentListForProjectInstrumentNames(ctx context.Context, arg 
ProjectInstrumentListForProjectInstrumentNamesParams) ([]string, error) { + rows, err := q.db.Query(ctx, projectInstrumentListForProjectInstrumentNames, arg.ProjectID, arg.InstrumentNames) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var name string + if err := rows.Scan(&name); err != nil { + return nil, err + } + items = append(items, name) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_constant.sql_gen.go b/api/internal/db/instrument_constant.sql_gen.go new file mode 100644 index 00000000..7cf37ec8 --- /dev/null +++ b/api/internal/db/instrument_constant.sql_gen.go @@ -0,0 +1,80 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_constant.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const instrumentConstantCreate = `-- name: InstrumentConstantCreate :exec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2) +` + +type InstrumentConstantCreateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error { + _, err := q.db.Exec(ctx, instrumentConstantCreate, arg.InstrumentID, arg.TimeseriesID) + return err +} + +const instrumentConstantDelete = `-- name: InstrumentConstantDelete :exec +delete from instrument_constants where instrument_id = $1 and timeseries_id = $2 +` + +type InstrumentConstantDeleteParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error { + _, err := q.db.Exec(ctx, instrumentConstantDelete, arg.InstrumentID, arg.TimeseriesID) + return err +} + +const instrumentConstantList = `-- 
name: InstrumentConstantList :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join instrument_constants ic on ic.timeseries_id = t.id +where ic.instrument_id = $1 +` + +func (q *Queries) InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, instrumentConstantList, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/instrument_group.sql_gen.go b/api/internal/db/instrument_group.sql_gen.go new file mode 100644 index 00000000..b57a4dfc --- /dev/null +++ b/api/internal/db/instrument_group.sql_gen.go @@ -0,0 +1,255 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_group.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const instrumentGroupCreate = `-- name: InstrumentGroupCreate :one +insert into instrument_group (slug, name, description, created_by, created_at, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id +` + +type InstrumentGroupCreateParams struct { + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type InstrumentGroupCreateRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroupCreateRow, error) { + row := q.db.QueryRow(ctx, instrumentGroupCreate, + arg.Name, + arg.Description, + arg.CreatedBy, + arg.CreatedAt, + arg.ProjectID, + ) + var i InstrumentGroupCreateRow + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + ) + return i, err +} + +const instrumentGroupDeleteFlag = `-- name: InstrumentGroupDeleteFlag :exec +update instrument_group set deleted = true where id = $1 +` + +func (q *Queries) InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentGroupDeleteFlag, id) + return err +} + +const instrumentGroupGet = `-- name: InstrumentGroupGet :one +select id, slug, name, description, created_by, 
created_at, updated_by, updated_at, project_id, instrument_count, timeseries_count +from v_instrument_group +where id=$1 +` + +func (q *Queries) InstrumentGroupGet(ctx context.Context, id uuid.UUID) (VInstrumentGroup, error) { + row := q.db.QueryRow(ctx, instrumentGroupGet, id) + var i VInstrumentGroup + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + &i.InstrumentCount, + &i.TimeseriesCount, + ) + return i, err +} + +const instrumentGroupInstrumentCreate = `-- name: InstrumentGroupInstrumentCreate :exec +insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2) +` + +type InstrumentGroupInstrumentCreateParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) InstrumentGroupInstrumentCreate(ctx context.Context, arg InstrumentGroupInstrumentCreateParams) error { + _, err := q.db.Exec(ctx, instrumentGroupInstrumentCreate, arg.InstrumentGroupID, arg.InstrumentID) + return err +} + +const instrumentGroupInstrumentDelete = `-- name: InstrumentGroupInstrumentDelete :exec +delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2 +` + +type InstrumentGroupInstrumentDeleteParams struct { + InstrumentGroupID uuid.UUID `json:"instrument_group_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) InstrumentGroupInstrumentDelete(ctx context.Context, arg InstrumentGroupInstrumentDeleteParams) error { + _, err := q.db.Exec(ctx, instrumentGroupInstrumentDelete, arg.InstrumentGroupID, arg.InstrumentID) + return err +} + +const instrumentGroupList = `-- name: InstrumentGroupList :many +select id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id, instrument_count, timeseries_count +from v_instrument_group +` + +func (q *Queries) InstrumentGroupList(ctx 
context.Context) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, instrumentGroupList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrumentGroup{} + for rows.Next() { + var i VInstrumentGroup + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const instrumentGroupListForProject = `-- name: InstrumentGroupListForProject :many +select ig.id, ig.slug, ig.name, ig.description, ig.created_by, ig.created_at, ig.updated_by, ig.updated_at, ig.project_id, ig.instrument_count, ig.timeseries_count +from v_instrument_group ig +where ig.project_id = $1 +` + +func (q *Queries) InstrumentGroupListForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) { + rows, err := q.db.Query(ctx, instrumentGroupListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrumentGroup{} + for rows.Next() { + var i VInstrumentGroup + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + &i.InstrumentCount, + &i.TimeseriesCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const instrumentGroupUpdate = `-- name: InstrumentGroupUpdate :one +update instrument_group set + name = $2, + description = $3, + updated_by = $4, + updated_at = $5, + project_id = $6 + where id = $1 +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id +` + +type InstrumentGroupUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description 
*string `json:"description"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type InstrumentGroupUpdateRow struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description *string `json:"description"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + ProjectID *uuid.UUID `json:"project_id"` +} + +func (q *Queries) InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroupUpdateRow, error) { + row := q.db.QueryRow(ctx, instrumentGroupUpdate, + arg.ID, + arg.Name, + arg.Description, + arg.UpdatedBy, + arg.UpdatedAt, + arg.ProjectID, + ) + var i InstrumentGroupUpdateRow + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ProjectID, + ) + return i, err +} diff --git a/api/internal/db/instrument_incl.sql_gen.go b/api/internal/db/instrument_incl.sql_gen.go new file mode 100644 index 00000000..69b73193 --- /dev/null +++ b/api/internal/db/instrument_incl.sql_gen.go @@ -0,0 +1,191 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_incl.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const inclMeasurementListForInstrumentRange = `-- name: InclMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_incl_measurement m1 +where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_incl_measurement m2 +where m2.time = any(select o.initial_time from incl_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type InclMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) InclMeasurementListForInstrumentRange(ctx context.Context, arg InclMeasurementListForInstrumentRangeParams) ([]VInclMeasurement, error) { + rows, err := q.db.Query(ctx, inclMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInclMeasurement{} + for rows.Next() { + var i VInclMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const inclOptsCreate = `-- name: InclOptsCreate :exec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type InclOptsCreateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) InclOptsCreate(ctx context.Context, arg InclOptsCreateParams) error { + _, err := q.db.Exec(ctx, 
inclOptsCreate, + arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const inclOptsUpdate = `-- name: InclOptsUpdate :exec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type InclOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) InclOptsUpdate(ctx context.Context, arg InclOptsUpdateParams) error { + _, err := q.db.Exec(ctx, inclOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const inclSegmentCreate = `-- name: InclSegmentCreate :exec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type InclSegmentCreateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) InclSegmentCreate(ctx context.Context, arg InclSegmentCreateParams) error { + _, err := q.db.Exec(ctx, inclSegmentCreate, + arg.ID, + arg.InstrumentID, + arg.DepthTimeseriesID, + arg.A0TimeseriesID, + arg.A180TimeseriesID, + arg.B0TimeseriesID, + arg.B180TimeseriesID, + ) + return err +} + +const inclSegmentListForInstrument = `-- name: InclSegmentListForInstrument :many +select id, instrument_id, depth_timeseries_id, a0_timeseries_id, a180_timeseries_id, b0_timeseries_id, b180_timeseries_id from v_incl_segment where instrument_id = $1 +` + +func (q *Queries) 
InclSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) { + rows, err := q.db.Query(ctx, inclSegmentListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VInclSegment{} + for rows.Next() { + var i VInclSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.DepthTimeseriesID, + &i.A0TimeseriesID, + &i.A180TimeseriesID, + &i.B0TimeseriesID, + &i.B180TimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const inclSegmentUpdate = `-- name: InclSegmentUpdate :exec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2 +` + +type InclSegmentUpdateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id"` +} + +func (q *Queries) InclSegmentUpdate(ctx context.Context, arg InclSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, inclSegmentUpdate, + arg.ID, + arg.InstrumentID, + arg.DepthTimeseriesID, + arg.A0TimeseriesID, + arg.A180TimeseriesID, + arg.B0TimeseriesID, + arg.B180TimeseriesID, + ) + return err +} diff --git a/api/internal/db/instrument_ipi.sql_gen.go b/api/internal/db/instrument_ipi.sql_gen.go new file mode 100644 index 00000000..c952d02c --- /dev/null +++ b/api/internal/db/instrument_ipi.sql_gen.go @@ -0,0 +1,184 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_ipi.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const ipiMeasurementListForInstrumentRange = `-- name: IpiMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_ipi_measurement m1 +where m1.instrument_id=$1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_ipi_measurement m2 +where m2.time = any(select o.initial_time from ipi_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type IpiMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) IpiMeasurementListForInstrumentRange(ctx context.Context, arg IpiMeasurementListForInstrumentRangeParams) ([]VIpiMeasurement, error) { + rows, err := q.db.Query(ctx, ipiMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VIpiMeasurement{} + for rows.Next() { + var i VIpiMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ipiOptsCreate = `-- name: IpiOptsCreate :exec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type IpiOptsCreateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) IpiOptsCreate(ctx context.Context, arg IpiOptsCreateParams) error { + _, err := q.db.Exec(ctx, ipiOptsCreate, + 
arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const ipiOptsUpdate = `-- name: IpiOptsUpdate :exec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type IpiOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) IpiOptsUpdate(ctx context.Context, arg IpiOptsUpdateParams) error { + _, err := q.db.Exec(ctx, ipiOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const ipiSegmentCreate = `-- name: IpiSegmentCreate :exec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6) +` + +type IpiSegmentCreateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) IpiSegmentCreate(ctx context.Context, arg IpiSegmentCreateParams) error { + _, err := q.db.Exec(ctx, ipiSegmentCreate, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.TiltTimeseriesID, + arg.IncDevTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} + +const ipiSegmentListForInstrument = `-- name: IpiSegmentListForInstrument :many +select id, instrument_id, length_timeseries_id, length, tilt_timeseries_id, inc_dev_timeseries_id from v_ipi_segment where instrument_id = $1 +` + +func (q *Queries) IpiSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) { + rows, err := q.db.Query(ctx, ipiSegmentListForInstrument, 
instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VIpiSegment{} + for rows.Next() { + var i VIpiSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.LengthTimeseriesID, + &i.Length, + &i.TiltTimeseriesID, + &i.IncDevTimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ipiSegmentUpdate = `-- name: IpiSegmentUpdate :exec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2 +` + +type IpiSegmentUpdateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) IpiSegmentUpdate(ctx context.Context, arg IpiSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, ipiSegmentUpdate, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.TiltTimeseriesID, + arg.IncDevTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} diff --git a/api/internal/db/instrument_note.sql_gen.go b/api/internal/db/instrument_note.sql_gen.go new file mode 100644 index 00000000..f9874322 --- /dev/null +++ b/api/internal/db/instrument_note.sql_gen.go @@ -0,0 +1,164 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_note.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const instrumentNoteCreate = `-- name: InstrumentNoteCreate :one +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at +` + +type InstrumentNoteCreateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +func (q *Queries) InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, instrumentNoteCreate, + arg.InstrumentID, + arg.Title, + arg.Body, + arg.Time, + arg.CreatedBy, + arg.CreatedAt, + ) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + ) + return i, err +} + +const instrumentNoteDelete = `-- name: InstrumentNoteDelete :exec +delete from instrument_note where id = $1 +` + +func (q *Queries) InstrumentNoteDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentNoteDelete, id) + return err +} + +const instrumentNoteGet = `-- name: InstrumentNoteGet :one +select id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at +from instrument_note +where id = $1 +` + +func (q *Queries) InstrumentNoteGet(ctx context.Context, id uuid.UUID) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, instrumentNoteGet, id) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + ) + return i, err +} + +const 
instrumentNoteListForInstrument = `-- name: InstrumentNoteListForInstrument :many +select id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at +from instrument_note +where instrument_id = $1 +` + +func (q *Queries) InstrumentNoteListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) { + rows, err := q.db.Query(ctx, instrumentNoteListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []InstrumentNote{} + for rows.Next() { + var i InstrumentNote + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const instrumentNoteUpdate = `-- name: InstrumentNoteUpdate :one +update instrument_note set + title=$2, + body=$3, + time=$4, + updated_by=$5, + updated_at=$6 +where id = $1 +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at +` + +type InstrumentNoteUpdateParams struct { + ID uuid.UUID `json:"id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUpdateParams) (InstrumentNote, error) { + row := q.db.QueryRow(ctx, instrumentNoteUpdate, + arg.ID, + arg.Title, + arg.Body, + arg.Time, + arg.UpdatedBy, + arg.UpdatedAt, + ) + var i InstrumentNote + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Title, + &i.Body, + &i.Time, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + ) + return i, err +} diff --git a/api/internal/db/instrument_saa.sql_gen.go b/api/internal/db/instrument_saa.sql_gen.go new file mode 100644 index 00000000..fd781274 --- /dev/null 
+++ b/api/internal/db/instrument_saa.sql_gen.go @@ -0,0 +1,192 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: instrument_saa.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const saaMeasurementListForInstrumentRange = `-- name: SaaMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_saa_measurement m1 +where m1.instrument_id = $1 and m1.time >= $2 and m1.time <= $3 +union +select m2.instrument_id, m2.time, m2.measurements +from v_saa_measurement m2 +where m2.time = any(select o.initial_time from saa_opts o where o.instrument_id = $1) +and m2.instrument_id = $1 +order by time asc +` + +type SaaMeasurementListForInstrumentRangeParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StartTime time.Time `json:"start_time"` + EndTime time.Time `json:"end_time"` +} + +func (q *Queries) SaaMeasurementListForInstrumentRange(ctx context.Context, arg SaaMeasurementListForInstrumentRangeParams) ([]VSaaMeasurement, error) { + rows, err := q.db.Query(ctx, saaMeasurementListForInstrumentRange, arg.InstrumentID, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSaaMeasurement{} + for rows.Next() { + var i VSaaMeasurement + if err := rows.Scan(&i.InstrumentID, &i.Time, &i.Measurements); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const saaOptsCreate = `-- name: SaaOptsCreate :exec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4) +` + +type SaaOptsCreateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + NumSegments int32 `json:"num_segments"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) 
SaaOptsCreate(ctx context.Context, arg SaaOptsCreateParams) error { + _, err := q.db.Exec(ctx, saaOptsCreate, + arg.InstrumentID, + arg.NumSegments, + arg.BottomElevationTimeseriesID, + arg.InitialTime, + ) + return err +} + +const saaOptsUpdate = `-- name: SaaOptsUpdate :exec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1 +` + +type SaaOptsUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"` + InitialTime *time.Time `json:"initial_time"` +} + +func (q *Queries) SaaOptsUpdate(ctx context.Context, arg SaaOptsUpdateParams) error { + _, err := q.db.Exec(ctx, saaOptsUpdate, arg.InstrumentID, arg.BottomElevationTimeseriesID, arg.InitialTime) + return err +} + +const saaSegmentCreate = `-- name: SaaSegmentCreate :exec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7) +` + +type SaaSegmentCreateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) SaaSegmentCreate(ctx context.Context, arg SaaSegmentCreateParams) error { + _, err := q.db.Exec(ctx, saaSegmentCreate, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.XTimeseriesID, + arg.YTimeseriesID, + arg.ZTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} + +const saaSegmentListForInstrument = `-- name: SaaSegmentListForInstrument :many +select id, instrument_id, length_timeseries_id, length, x_timeseries_id, y_timeseries_id, z_timeseries_id, temp_timeseries_id from v_saa_segment 
where instrument_id = $1 +` + +func (q *Queries) SaaSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) { + rows, err := q.db.Query(ctx, saaSegmentListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSaaSegment{} + for rows.Next() { + var i VSaaSegment + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.LengthTimeseriesID, + &i.Length, + &i.XTimeseriesID, + &i.YTimeseriesID, + &i.ZTimeseriesID, + &i.TempTimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const saaSegmentUpdate = `-- name: SaaSegmentUpdate :exec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2 +` + +type SaaSegmentUpdateParams struct { + ID int32 `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id"` + LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id"` +} + +func (q *Queries) SaaSegmentUpdate(ctx context.Context, arg SaaSegmentUpdateParams) error { + _, err := q.db.Exec(ctx, saaSegmentUpdate, + arg.ID, + arg.InstrumentID, + arg.LengthTimeseriesID, + arg.XTimeseriesID, + arg.YTimeseriesID, + arg.ZTimeseriesID, + arg.TempTimeseriesID, + ) + return err +} diff --git a/api/internal/db/instrument_status.sql_gen.go b/api/internal/db/instrument_status.sql_gen.go new file mode 100644 index 00000000..235ecbef --- /dev/null +++ b/api/internal/db/instrument_status.sql_gen.go @@ -0,0 +1,88 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: instrument_status.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const instrumentStatusCreateOrUpdate = `-- name: InstrumentStatusCreateOrUpdate :exec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id +` + +type InstrumentStatusCreateOrUpdateParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + StatusID uuid.UUID `json:"status_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) InstrumentStatusCreateOrUpdate(ctx context.Context, arg InstrumentStatusCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, instrumentStatusCreateOrUpdate, arg.InstrumentID, arg.StatusID, arg.Time) + return err +} + +const instrumentStatusDelete = `-- name: InstrumentStatusDelete :exec +delete from instrument_status where id = $1 +` + +func (q *Queries) InstrumentStatusDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, instrumentStatusDelete, id) + return err +} + +const instrumentStatusGet = `-- name: InstrumentStatusGet :one +select id, instrument_id, status_id, status, time from v_instrument_status +where id=$1 +` + +func (q *Queries) InstrumentStatusGet(ctx context.Context, id uuid.UUID) (VInstrumentStatus, error) { + row := q.db.QueryRow(ctx, instrumentStatusGet, id) + var i VInstrumentStatus + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.StatusID, + &i.Status, + &i.Time, + ) + return i, err +} + +const instrumentStatusListForInstrument = `-- name: InstrumentStatusListForInstrument :many +select id, instrument_id, status_id, status, time from v_instrument_status +where instrument_id=$1 +order by time desc +` + +func (q *Queries) InstrumentStatusListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInstrumentStatus, error) { + rows, err := q.db.Query(ctx, instrumentStatusListForInstrument, instrumentID) 
+ if err != nil { + return nil, err + } + defer rows.Close() + items := []VInstrumentStatus{} + for rows.Next() { + var i VInstrumentStatus + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.StatusID, + &i.Status, + &i.Time, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/measurement.manual.go b/api/internal/db/measurement.manual.go new file mode 100644 index 00000000..700e69c0 --- /dev/null +++ b/api/internal/db/measurement.manual.go @@ -0,0 +1,149 @@ +package db + +import ( + "context" + "math" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +type TimeseriesMeasurementCollectionGetForRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` + Threshold int `json:"threshold"` +} + +func (q *Queries) TimeseriesMeasurementCollectionGetForRange(ctx context.Context, arg TimeseriesMeasurementCollectionGetForRangeParams) (MeasurementCollection, error) { + var mc MeasurementCollection + rows, err := q.db.Query(ctx, timeseriesMeasurementListForRange, arg.TimeseriesID, arg.After, arg.Before) + if err != nil { + return mc, err + } + mm, err := pgx.CollectRows[Measurement](rows, pgx.RowToStructByNameLax) + if err != nil { + return mc, err + } + mc.TimeseriesID = arg.TimeseriesID + mc.Items = LTTB(mm, arg.Threshold) + return mc, nil +} + +type MeasurementGetter interface { + getTime() time.Time + getValue() float64 +} + +func (m VTimeseriesMeasurement) getTime() time.Time { + return m.Time +} + +func (m VTimeseriesMeasurement) getValue() float64 { + return float64(m.Value) +} + +func (m Measurement) getTime() time.Time { + return m.Time +} + +func (m Measurement) getValue() float64 { + return float64(m.Value) +} + +func (ml MeasurementLean) getTime() time.Time { + var t time.Time + for k := range ml { + t = k + } + return t +} + +func (ml 
MeasurementLean) getValue() float64 { + var m float64 + for _, v := range ml { + m = v + } + return m +} + +// A slightly modified LTTB (Largest-Triangle-Three-Buckets) algorithm for downsampling timeseries measurements +// https://godoc.org/github.com/dgryski/go-lttb +func LTTB[T MeasurementGetter](data []T, threshold int) []T { + if threshold == 0 || threshold >= len(data) { + return data // Nothing to do + } + + if threshold < 3 { + threshold = 3 + } + + sampled := make([]T, 0, threshold) + + // Bucket size. Leave room for start and end data points + every := float64(len(data)-2) / float64(threshold-2) + + sampled = append(sampled, data[0]) // Always add the first point + + bucketStart := 1 + bucketCenter := int(math.Floor(every)) + 1 + + var a int + + for i := 0; i < threshold-2; i++ { + + bucketEnd := int(math.Floor(float64(i+2)*every)) + 1 + + // Calculate point average for next bucket (containing c) + avgRangeStart := bucketCenter + avgRangeEnd := bucketEnd + + if avgRangeEnd >= len(data) { + avgRangeEnd = len(data) + } + + avgRangeLength := float64(avgRangeEnd - avgRangeStart) + + var avgX, avgY float64 + for ; avgRangeStart < avgRangeEnd; avgRangeStart++ { + avgX += time.Duration(data[avgRangeStart].getTime().UnixNano()).Seconds() + avgY += data[avgRangeStart].getValue() + } + avgX /= avgRangeLength + avgY /= avgRangeLength + + // Get the range for this bucket + rangeOffs := bucketStart + rangeTo := bucketCenter + + // Point a + pointAX := time.Duration(data[a].getTime().UnixNano()).Seconds() + pointAY := data[a].getValue() + + maxArea := float64(-1.0) + + var nextA int + for ; rangeOffs < rangeTo; rangeOffs++ { + // Calculate triangle area over three buckets + area := (pointAX-avgX)*(data[rangeOffs].getValue()-pointAY) - (pointAX-time.Duration(data[rangeOffs].getTime().UnixNano()).Seconds())*(avgY-pointAY) + // We only care about the relative area here. 
+ // Calling math.Abs() is slower than squaring + area *= area + if area > maxArea { + maxArea = area + nextA = rangeOffs // Next a is this b + } + } + + sampled = append(sampled, data[nextA]) // Pick this point from the bucket + a = nextA // This a is the next a (chosen b) + + bucketStart = bucketCenter + bucketCenter = bucketEnd + } + + sampled = append(sampled, data[len(data)-1]) // Always add last + + return sampled +} diff --git a/api/internal/db/measurement.sql_gen.go b/api/internal/db/measurement.sql_gen.go new file mode 100644 index 00000000..446de27b --- /dev/null +++ b/api/internal/db/measurement.sql_gen.go @@ -0,0 +1,206 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: measurement.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const timeseriesMeasurementCreate = `-- name: TimeseriesMeasurementCreate :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing +` + +type TimeseriesMeasurementCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementCreate, arg.TimeseriesID, arg.Time, arg.Value) + return err +} + +const timeseriesMeasurementCreateOrUpdate = `-- name: TimeseriesMeasurementCreateOrUpdate :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value +` + +type TimeseriesMeasurementCreateOrUpdateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` +} + +func (q *Queries) TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg 
TimeseriesMeasurementCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementCreateOrUpdate, arg.TimeseriesID, arg.Time, arg.Value) + return err +} + +const timeseriesMeasurementDelete = `-- name: TimeseriesMeasurementDelete :exec +delete from timeseries_measurement where timeseries_id=$1 and time=$2 +` + +type TimeseriesMeasurementDeleteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementDelete, arg.TimeseriesID, arg.Time) + return err +} + +const timeseriesMeasurementDeleteRange = `-- name: TimeseriesMeasurementDeleteRange :exec +delete from timeseries_measurement where timeseries_id = $1 and time > $2 and time < $3 +` + +type TimeseriesMeasurementDeleteRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesMeasurementDeleteRange(ctx context.Context, arg TimeseriesMeasurementDeleteRangeParams) error { + _, err := q.db.Exec(ctx, timeseriesMeasurementDeleteRange, arg.TimeseriesID, arg.After, arg.Before) + return err +} + +const timeseriesMeasurementGetMostRecent = `-- name: TimeseriesMeasurementGetMostRecent :one +select time, value, timeseries_id +from timeseries_measurement +where timeseries_id = $1 +order by time desc +limit 1 +` + +func (q *Queries) TimeseriesMeasurementGetMostRecent(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesMeasurement, error) { + row := q.db.QueryRow(ctx, timeseriesMeasurementGetMostRecent, timeseriesID) + var i TimeseriesMeasurement + err := row.Scan(&i.Time, &i.Value, &i.TimeseriesID) + return i, err +} + +const timeseriesMeasurementListForRange = `-- name: TimeseriesMeasurementListForRange :many +select timeseries_id, time, value, masked, validated, annotation from 
v_timeseries_measurement +where timeseries_id=$1 +and time > $2 +and time < $3 +` + +type TimeseriesMeasurementListForRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesMeasurementListForRange(ctx context.Context, arg TimeseriesMeasurementListForRangeParams) ([]VTimeseriesMeasurement, error) { + rows, err := q.db.Query(ctx, timeseriesMeasurementListForRange, arg.TimeseriesID, arg.After, arg.Before) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseriesMeasurement{} + for rows.Next() { + var i VTimeseriesMeasurement + if err := rows.Scan( + &i.TimeseriesID, + &i.Time, + &i.Value, + &i.Masked, + &i.Validated, + &i.Annotation, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesNoteCreate = `-- name: TimeseriesNoteCreate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing +` + +type TimeseriesNoteCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteCreate, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err +} + +const timeseriesNoteCreateOrUpdate = `-- name: TimeseriesNoteCreateOrUpdate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = 
excluded.annotation +` + +type TimeseriesNoteCreateOrUpdateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +func (q *Queries) TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteCreateOrUpdate, + arg.TimeseriesID, + arg.Time, + arg.Masked, + arg.Validated, + arg.Annotation, + ) + return err +} + +const timeseriesNoteDelete = `-- name: TimeseriesNoteDelete :exec +delete from timeseries_notes where timeseries_id=$1 and time=$2 +` + +type TimeseriesNoteDeleteParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` +} + +func (q *Queries) TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteDelete, arg.TimeseriesID, arg.Time) + return err +} + +const timeseriesNoteDeleteRange = `-- name: TimeseriesNoteDeleteRange :exec +delete from timeseries_notes where timeseries_id = $1 and time > $2 and time < $3 +` + +type TimeseriesNoteDeleteRangeParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) TimeseriesNoteDeleteRange(ctx context.Context, arg TimeseriesNoteDeleteRangeParams) error { + _, err := q.db.Exec(ctx, timeseriesNoteDeleteRange, arg.TimeseriesID, arg.After, arg.Before) + return err +} diff --git a/api/internal/db/models.go b/api/internal/db/models.go new file mode 100644 index 00000000..b7c180ec --- /dev/null +++ b/api/internal/db/models.go @@ -0,0 +1,1435 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 + +package db + +import ( + "database/sql/driver" + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type JobStatus string + +const ( + JobStatusSUCCESS JobStatus = "SUCCESS" + JobStatusFAIL JobStatus = "FAIL" + JobStatusINIT JobStatus = "INIT" +) + +func (e *JobStatus) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = JobStatus(s) + case string: + *e = JobStatus(s) + default: + return fmt.Errorf("unsupported scan type for JobStatus: %T", src) + } + return nil +} + +type NullJobStatus struct { + JobStatus JobStatus `json:"job_status"` + Valid bool `json:"valid"` // Valid is true if JobStatus is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullJobStatus) Scan(value interface{}) error { + if value == nil { + ns.JobStatus, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.JobStatus.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullJobStatus) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.JobStatus), nil +} + +type LineStyle string + +const ( + LineStyleSolid LineStyle = "solid" + LineStyleDot LineStyle = "dot" + LineStyleDash LineStyle = "dash" + LineStyleLongdash LineStyle = "longdash" + LineStyleDashdot LineStyle = "dashdot" + LineStyleLongdashdot LineStyle = "longdashdot" +) + +func (e *LineStyle) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = LineStyle(s) + case string: + *e = LineStyle(s) + default: + return fmt.Errorf("unsupported scan type for LineStyle: %T", src) + } + return nil +} + +type NullLineStyle struct { + LineStyle LineStyle `json:"line_style"` + Valid bool `json:"valid"` // Valid is true if LineStyle is not NULL +} + +// Scan implements the Scanner interface. 
+func (ns *NullLineStyle) Scan(value interface{}) error { + if value == nil { + ns.LineStyle, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.LineStyle.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullLineStyle) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.LineStyle), nil +} + +type PlotType string + +const ( + PlotTypeScatterLine PlotType = "scatter-line" + PlotTypeProfile PlotType = "profile" + PlotTypeContour PlotType = "contour" + PlotTypeBullseye PlotType = "bullseye" +) + +func (e *PlotType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = PlotType(s) + case string: + *e = PlotType(s) + default: + return fmt.Errorf("unsupported scan type for PlotType: %T", src) + } + return nil +} + +type NullPlotType struct { + PlotType PlotType `json:"plot_type"` + Valid bool `json:"valid"` // Valid is true if PlotType is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullPlotType) Scan(value interface{}) error { + if value == nil { + ns.PlotType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.PlotType.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullPlotType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.PlotType), nil +} + +type TimeseriesType string + +const ( + TimeseriesTypeStandard TimeseriesType = "standard" + TimeseriesTypeConstant TimeseriesType = "constant" + TimeseriesTypeComputed TimeseriesType = "computed" + TimeseriesTypeCwms TimeseriesType = "cwms" +) + +func (e *TimeseriesType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = TimeseriesType(s) + case string: + *e = TimeseriesType(s) + default: + return fmt.Errorf("unsupported scan type for TimeseriesType: %T", src) + } + return nil +} + +type NullTimeseriesType struct { + TimeseriesType TimeseriesType `json:"timeseries_type"` + Valid bool `json:"valid"` // Valid is true if TimeseriesType is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullTimeseriesType) Scan(value interface{}) error { + if value == nil { + ns.TimeseriesType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.TimeseriesType.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullTimeseriesType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.TimeseriesType), nil +} + +type TraceType string + +const ( + TraceTypeBar TraceType = "bar" + TraceTypeScattergl TraceType = "scattergl" +) + +func (e *TraceType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = TraceType(s) + case string: + *e = TraceType(s) + default: + return fmt.Errorf("unsupported scan type for TraceType: %T", src) + } + return nil +} + +type NullTraceType struct { + TraceType TraceType `json:"trace_type"` + Valid bool `json:"valid"` // Valid is true if TraceType is not NULL +} + +// Scan implements the Scanner interface. 
+func (ns *NullTraceType) Scan(value interface{}) error { + if value == nil { + ns.TraceType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.TraceType.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullTraceType) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.TraceType), nil +} + +type UploaderConfigType string + +const ( + UploaderConfigTypeCsv UploaderConfigType = "csv" + UploaderConfigTypeDux UploaderConfigType = "dux" + UploaderConfigTypeToa5 UploaderConfigType = "toa5" +) + +func (e *UploaderConfigType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = UploaderConfigType(s) + case string: + *e = UploaderConfigType(s) + default: + return fmt.Errorf("unsupported scan type for UploaderConfigType: %T", src) + } + return nil +} + +type NullUploaderConfigType struct { + UploaderConfigType UploaderConfigType `json:"uploader_config_type"` + Valid bool `json:"valid"` // Valid is true if UploaderConfigType is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullUploaderConfigType) Scan(value interface{}) error { + if value == nil { + ns.UploaderConfigType, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.UploaderConfigType.Scan(value) +} + +// Value implements the driver Valuer interface. 
func (ns NullUploaderConfigType) Value() (driver.Value, error) {
	if !ns.Valid {
		return nil, nil
	}
	return string(ns.UploaderConfigType), nil
}

// YAxis is a string-backed enum selecting which y axis a trace is drawn on.
type YAxis string

const (
	YAxisY1 YAxis = "y1"
	YAxisY2 YAxis = "y2"
)

// Scan accepts the enum value as either []byte or string from the driver.
func (e *YAxis) Scan(src interface{}) error {
	switch s := src.(type) {
	case []byte:
		*e = YAxis(s)
	case string:
		*e = YAxis(s)
	default:
		return fmt.Errorf("unsupported scan type for YAxis: %T", src)
	}
	return nil
}

// NullYAxis wraps YAxis for columns that may be NULL.
type NullYAxis struct {
	YAxis YAxis `json:"y_axis"`
	Valid bool  `json:"valid"` // Valid is true if YAxis is not NULL
}

// Scan implements the Scanner interface.
func (ns *NullYAxis) Scan(value interface{}) error {
	if value == nil {
		ns.YAxis, ns.Valid = "", false
		return nil
	}
	ns.Valid = true
	return ns.YAxis.Scan(value)
}

// Value implements the driver Valuer interface.
func (ns NullYAxis) Value() (driver.Value, error) {
	if !ns.Valid {
		return nil, nil
	}
	return string(ns.YAxis), nil
}

// The structs below are row models emitted by sqlc (v1.27.0, per the file
// header); json tags drive API serialization. Edit the SQL schema/queries and
// regenerate rather than modifying these by hand.

type Agency struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type Alert struct {
	ID            uuid.UUID `json:"id"`
	AlertConfigID uuid.UUID `json:"alert_config_id"`
	CreatedAt     time.Time `json:"created_at"`
}

type AlertConfig struct {
	ID                    uuid.UUID  `json:"id"`
	ProjectID             uuid.UUID  `json:"project_id"`
	Name                  string     `json:"name"`
	Body                  string     `json:"body"`
	CreatedBy             uuid.UUID  `json:"created_by"`
	CreatedAt             time.Time  `json:"created_at"`
	UpdatedBy             *uuid.UUID `json:"updated_by"`
	UpdatedAt             *time.Time `json:"updated_at"`
	AlertTypeID           uuid.UUID  `json:"alert_type_id"`
	StartedAt             time.Time  `json:"started_at"`
	ScheduleInterval      string     `json:"schedule_interval"`
	NMissedBeforeAlert    int32      `json:"n_missed_before_alert"`
	WarningInterval       string     `json:"warning_interval"`
	RemindInterval        string     `json:"remind_interval"`
	LastCheckedAt         *time.Time `json:"last_checked_at"`
	LastRemindedAt        *time.Time `json:"last_reminded_at"`
	Deleted               bool       `json:"deleted"`
	MuteConsecutiveAlerts bool       `json:"mute_consecutive_alerts"`
}

type AlertConfigInstrument struct {
	AlertConfigID uuid.UUID `json:"alert_config_id"`
	InstrumentID  uuid.UUID `json:"instrument_id"`
}

type AlertEmailSubscription struct {
	ID            uuid.UUID `json:"id"`
	AlertConfigID uuid.UUID `json:"alert_config_id"`
	EmailID       uuid.UUID `json:"email_id"`
	MuteNotify    bool      `json:"mute_notify"`
}

type AlertProfileSubscription struct {
	ID            uuid.UUID `json:"id"`
	AlertConfigID uuid.UUID `json:"alert_config_id"`
	ProfileID     uuid.UUID `json:"profile_id"`
	MuteUi        bool      `json:"mute_ui"`
	MuteNotify    bool      `json:"mute_notify"`
}

type AlertRead struct {
	AlertID   uuid.UUID `json:"alert_id"`
	ProfileID uuid.UUID `json:"profile_id"`
}

type AlertType struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type AwareParameter struct {
	ID             uuid.UUID `json:"id"`
	Key            string    `json:"key"`
	ParameterID    uuid.UUID `json:"parameter_id"`
	UnitID         uuid.UUID `json:"unit_id"`
	TimeseriesSlug string    `json:"timeseries_slug"`
	TimeseriesName string    `json:"timeseries_name"`
}

type AwarePlatform struct {
	ID           uuid.UUID  `json:"id"`
	AwareID      uuid.UUID  `json:"aware_id"`
	InstrumentID *uuid.UUID `json:"instrument_id"`
}

type AwarePlatformParameterEnabled struct {
	AwarePlatformID  uuid.UUID `json:"aware_platform_id"`
	AwareParameterID uuid.UUID `json:"aware_parameter_id"`
}

type Calculation struct {
	TimeseriesID uuid.UUID `json:"timeseries_id"`
	Contents     *string   `json:"contents"`
}

type CollectionGroup struct {
	ID        uuid.UUID  `json:"id"`
	ProjectID uuid.UUID  `json:"project_id"`
	Name      string     `json:"name"`
	Slug      string     `json:"slug"`
	CreatedBy uuid.UUID  `json:"created_by"`
	CreatedAt time.Time  `json:"created_at"`
	UpdatedBy *uuid.UUID `json:"updated_by"`
	UpdatedAt *time.Time `json:"updated_at"`
	SortOrder int32      `json:"sort_order"`
}

type CollectionGroupTimeseries struct {
	CollectionGroupID uuid.UUID `json:"collection_group_id"`
	TimeseriesID      uuid.UUID `json:"timeseries_id"`
	SortOrder         int32     `json:"sort_order"`
}

type Config struct {
	StaticHost   string `json:"static_host"`
	StaticPrefix string `json:"static_prefix"`
}

type Datalogger struct {
	ID        uuid.UUID  `json:"id"`
	Sn        string     `json:"sn"`
	ProjectID uuid.UUID  `json:"project_id"`
	CreatedBy uuid.UUID  `json:"created_by"`
	CreatedAt time.Time  `json:"created_at"`
	UpdatedBy *uuid.UUID `json:"updated_by"`
	UpdatedAt *time.Time `json:"updated_at"`
	Name      string     `json:"name"`
	Slug      string     `json:"slug"`
	ModelID   uuid.UUID  `json:"model_id"`
	Deleted   bool       `json:"deleted"`
}

type DataloggerEquivalencyTable struct {
	ID                uuid.UUID  `json:"id"`
	DataloggerID      uuid.UUID  `json:"datalogger_id"`
	DataloggerDeleted bool       `json:"datalogger_deleted"`
	FieldName         string     `json:"field_name"`
	DisplayName       *string    `json:"display_name"`
	InstrumentID      *uuid.UUID `json:"instrument_id"`
	TimeseriesID      *uuid.UUID `json:"timeseries_id"`
	DataloggerTableID *uuid.UUID `json:"datalogger_table_id"`
}

type DataloggerError struct {
	DataloggerID      uuid.UUID  `json:"datalogger_id"`
	ErrorMessage      *string    `json:"error_message"`
	DataloggerTableID *uuid.UUID `json:"datalogger_table_id"`
}

type DataloggerHash struct {
	DataloggerID uuid.UUID `json:"datalogger_id"`
	Hash         string    `json:"hash"`
}

type DataloggerModel struct {
	ID    uuid.UUID `json:"id"`
	Model *string   `json:"model"`
}

type DataloggerPreview struct {
	Preview           []byte    `json:"preview"`
	UpdatedAt         time.Time `json:"updated_at"`
	DataloggerTableID uuid.UUID `json:"datalogger_table_id"`
}

type DataloggerTable struct {
	ID           uuid.UUID `json:"id"`
	DataloggerID uuid.UUID `json:"datalogger_id"`
	TableName    string    `json:"table_name"`
}

type District struct {
	ID         uuid.UUID  `json:"id"`
	DivisionID uuid.UUID  `json:"division_id"`
	Name       *string    `json:"name"`
	Initials   *string    `json:"initials"`
	OfficeID   *uuid.UUID `json:"office_id"`
}

type Division struct {
	ID       uuid.UUID `json:"id"`
	Name     *string   `json:"name"`
	Initials *string   `json:"initials"`
	AgencyID uuid.UUID `json:"agency_id"`
}

type Email struct {
	ID    uuid.UUID `json:"id"`
	Email string    `json:"email"`
}

type Evaluation struct {
	ID          uuid.UUID  `json:"id"`
	ProjectID   uuid.UUID  `json:"project_id"`
	Name        string     `json:"name"`
	Body        string     `json:"body"`
	StartedAt   time.Time  `json:"started_at"`
	EndedAt     time.Time  `json:"ended_at"`
	CreatedBy   uuid.UUID  `json:"created_by"`
	CreatedAt   time.Time  `json:"created_at"`
	UpdatedBy   *uuid.UUID `json:"updated_by"`
	UpdatedAt   *time.Time `json:"updated_at"`
	SubmittalID *uuid.UUID `json:"submittal_id"`
}

type EvaluationInstrument struct {
	EvaluationID *uuid.UUID `json:"evaluation_id"`
	InstrumentID *uuid.UUID `json:"instrument_id"`
}

type Heartbeat struct {
	Time time.Time `json:"time"`
}

type InclOpts struct {
	InstrumentID                uuid.UUID  `json:"instrument_id"`
	NumSegments                 int32      `json:"num_segments"`
	BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"`
	InitialTime                 *time.Time `json:"initial_time"`
}

type InclSegment struct {
	InstrumentID      uuid.UUID  `json:"instrument_id"`
	ID                int32      `json:"id"`
	DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"`
	A0TimeseriesID    *uuid.UUID `json:"a0_timeseries_id"`
	A180TimeseriesID  *uuid.UUID `json:"a180_timeseries_id"`
	B0TimeseriesID    *uuid.UUID `json:"b0_timeseries_id"`
	B180TimeseriesID  *uuid.UUID `json:"b180_timeseries_id"`
}

type InclinometerMeasurement struct {
	Time         time.Time `json:"time"`
	Values       []byte    `json:"values"`
	Creator      uuid.UUID `json:"creator"`
	CreateDate   time.Time `json:"create_date"`
	TimeseriesID uuid.UUID `json:"timeseries_id"`
}

type Instrument struct {
	ID            uuid.UUID   `json:"id"`
	Deleted       bool        `json:"deleted"`
	Slug          string      `json:"slug"`
	Name          string      `json:"name"`
	Geometry      interface{} `json:"geometry"`
	Station       *int32      `json:"station"`
	StationOffset *int32      `json:"station_offset"`
	CreatedBy     uuid.UUID   `json:"created_by"`
	CreatedAt     time.Time   `json:"created_at"`
	UpdatedBy     *uuid.UUID  `json:"updated_by"`
	UpdatedAt     *time.Time  `json:"updated_at"`
	TypeID        uuid.UUID   `json:"type_id"`
	NidID         *string     `json:"nid_id"`
	UsgsID        *string     `json:"usgs_id"`
	ShowCwmsTab   bool        `json:"show_cwms_tab"`
}

type InstrumentConstants struct {
	TimeseriesID uuid.UUID `json:"timeseries_id"`
	InstrumentID uuid.UUID `json:"instrument_id"`
}

type InstrumentGroup struct {
	ID          uuid.UUID  `json:"id"`
	Deleted     bool       `json:"deleted"`
	Slug        string     `json:"slug"`
	Name        string     `json:"name"`
	Description *string    `json:"description"`
	CreatedBy   uuid.UUID  `json:"created_by"`
	CreatedAt   time.Time  `json:"created_at"`
	UpdatedBy   *uuid.UUID `json:"updated_by"`
	UpdatedAt   *time.Time `json:"updated_at"`
	ProjectID   *uuid.UUID `json:"project_id"`
}

type InstrumentGroupInstruments struct {
	InstrumentID      uuid.UUID `json:"instrument_id"`
	InstrumentGroupID uuid.UUID `json:"instrument_group_id"`
}

type InstrumentNote struct {
	ID           uuid.UUID  `json:"id"`
	InstrumentID uuid.UUID  `json:"instrument_id"`
	Title        string     `json:"title"`
	Body         string     `json:"body"`
	Time         time.Time  `json:"time"`
	CreatedBy    uuid.UUID  `json:"created_by"`
	CreatedAt    time.Time  `json:"created_at"`
	UpdatedBy    *uuid.UUID `json:"updated_by"`
	UpdatedAt    *time.Time `json:"updated_at"`
}

type InstrumentStatus struct {
	ID           uuid.UUID `json:"id"`
	InstrumentID uuid.UUID `json:"instrument_id"`
	StatusID     uuid.UUID `json:"status_id"`
	Time         time.Time `json:"time"`
}

type InstrumentTelemetry struct {
	ID              uuid.UUID `json:"id"`
	InstrumentID    uuid.UUID `json:"instrument_id"`
	TelemetryTypeID uuid.UUID `json:"telemetry_type_id"`
	TelemetryID     uuid.UUID `json:"telemetry_id"`
}

type InstrumentType struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
	Icon *string   `json:"icon"`
}

type IpiOpts struct {
	InstrumentID                uuid.UUID  `json:"instrument_id"`
	NumSegments                 int32      `json:"num_segments"`
	BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"`
	InitialTime                 *time.Time `json:"initial_time"`
}

type IpiSegment struct {
	InstrumentID       uuid.UUID  `json:"instrument_id"`
	ID                 int32      `json:"id"`
	LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"`
	TiltTimeseriesID   *uuid.UUID `json:"tilt_timeseries_id"`
	IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"`
	TempTimeseriesID   *uuid.UUID `json:"temp_timeseries_id"`
}

type Measure struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type Office struct {
	ID uuid.UUID `json:"id"`
}

type Parameter struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type PlotBullseyeConfig struct {
	PlotConfigID      uuid.UUID  `json:"plot_config_id"`
	XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"`
	YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"`
}

type PlotConfiguration struct {
	ID        uuid.UUID  `json:"id"`
	Slug      string     `json:"slug"`
	Name      string     `json:"name"`
	ProjectID uuid.UUID  `json:"project_id"`
	CreatedBy uuid.UUID  `json:"created_by"`
	CreatedAt time.Time  `json:"created_at"`
	UpdatedBy *uuid.UUID `json:"updated_by"`
	UpdatedAt *time.Time `json:"updated_at"`
	PlotType  PlotType   `json:"plot_type"`
}

type PlotConfigurationCustomShape struct {
	PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"`
	Enabled             bool       `json:"enabled"`
	Name                string     `json:"name"`
	DataPoint           float32    `json:"data_point"`
	Color               string     `json:"color"`
}

type PlotConfigurationSettings struct {
	ID               uuid.UUID `json:"id"`
	ShowMasked       bool      `json:"show_masked"`
	ShowNonvalidated bool      `json:"show_nonvalidated"`
	ShowComments     bool      `json:"show_comments"`
	AutoRange        bool      `json:"auto_range"`
	DateRange        string    `json:"date_range"`
	Threshold        int32     `json:"threshold"`
}

type PlotConfigurationTimeseriesTrace struct {
	PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"`
	TimeseriesID        *uuid.UUID `json:"timeseries_id"`
	TraceOrder          int32      `json:"trace_order"`
	TraceType           TraceType  `json:"trace_type"`
	Color               string     `json:"color"`
	LineStyle           LineStyle  `json:"line_style"`
	Width               float32    `json:"width"`
	ShowMarkers         bool       `json:"show_markers"`
	YAxis               YAxis      `json:"y_axis"`
}

type PlotContourConfig struct {
	PlotConfigID      uuid.UUID  `json:"plot_config_id"`
	Time              *time.Time `json:"time"`
	LocfBackfill      string     `json:"locf_backfill"`
	GradientSmoothing bool       `json:"gradient_smoothing"`
	ContourSmoothing  bool       `json:"contour_smoothing"`
	ShowLabels        bool       `json:"show_labels"`
}

type PlotContourConfigTimeseries struct {
	PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"`
	TimeseriesID        uuid.UUID `json:"timeseries_id"`
}

type PlotProfileConfig struct {
	PlotConfigID uuid.UUID `json:"plot_config_id"`
	InstrumentID uuid.UUID `json:"instrument_id"`
}

type PlotScatterLineConfig struct {
	PlotConfigID uuid.UUID `json:"plot_config_id"`
	YAxisTitle   *string   `json:"y_axis_title"`
	Y2AxisTitle  *string   `json:"y2_axis_title"`
}

type Profile struct {
	ID          uuid.UUID `json:"id"`
	Edipi       int64     `json:"edipi"`
	Username    string    `json:"username"`
	Email       string    `json:"email"`
	IsAdmin     bool      `json:"is_admin"`
	DisplayName string    `json:"display_name"`
}

type ProfileProjectRoles struct {
	ID          uuid.UUID  `json:"id"`
	ProfileID   uuid.UUID  `json:"profile_id"`
	RoleID      uuid.UUID  `json:"role_id"`
	ProjectID   uuid.UUID  `json:"project_id"`
	GrantedBy   *uuid.UUID `json:"granted_by"`
	GrantedDate time.Time  `json:"granted_date"`
}

type ProfileToken struct {
	ID        uuid.UUID `json:"id"`
	TokenID   string    `json:"token_id"`
	ProfileID uuid.UUID `json:"profile_id"`
	Issued    time.Time `json:"issued"`
	Hash      string    `json:"hash"`
}

type Project struct {
	ID         uuid.UUID  `json:"id"`
	Image      *string    `json:"image"`
	FederalID  *string    `json:"federal_id"`
	Deleted    bool       `json:"deleted"`
	Slug       string     `json:"slug"`
	Name       string     `json:"name"`
	CreatedBy  uuid.UUID  `json:"created_by"`
	CreatedAt  time.Time  `json:"created_at"`
	UpdatedBy  *uuid.UUID `json:"updated_by"`
	UpdatedAt  *time.Time `json:"updated_at"`
	DistrictID *uuid.UUID `json:"district_id"`
}

type ProjectInstrument struct {
	ProjectID    uuid.UUID `json:"project_id"`
	InstrumentID uuid.UUID `json:"instrument_id"`
}

type ReportConfig struct {
	ID                      uuid.UUID  `json:"id"`
	ProjectID               uuid.UUID  `json:"project_id"`
	Slug                    string     `json:"slug"`
	Name                    string     `json:"name"`
	Description             string     `json:"description"`
	CreatedBy               uuid.UUID  `json:"created_by"`
	CreatedAt               time.Time  `json:"created_at"`
	UpdatedBy               *uuid.UUID `json:"updated_by"`
	UpdatedAt               *time.Time `json:"updated_at"`
	DateRange               *string    `json:"date_range"`
	DateRangeEnabled        *bool      `json:"date_range_enabled"`
	ShowMasked              *bool      `json:"show_masked"`
	ShowMaskedEnabled       *bool      `json:"show_masked_enabled"`
	ShowNonvalidated        *bool      `json:"show_nonvalidated"`
	ShowNonvalidatedEnabled *bool      `json:"show_nonvalidated_enabled"`
}

type ReportConfigPlotConfig struct {
	ReportConfigID uuid.UUID `json:"report_config_id"`
	PlotConfigID   uuid.UUID `json:"plot_config_id"`
}

type ReportDownloadJob struct {
	ID                uuid.UUID  `json:"id"`
	ReportConfigID    *uuid.UUID `json:"report_config_id"`
	CreatedBy         uuid.UUID  `json:"created_by"`
	CreatedAt         time.Time  `json:"created_at"`
	Status            JobStatus  `json:"status"`
	FileKey           *string    `json:"file_key"`
	FileExpiry        *time.Time `json:"file_expiry"`
	Progress          int32      `json:"progress"`
	ProgressUpdatedAt time.Time  `json:"progress_updated_at"`
}

type Role struct {
	ID      uuid.UUID `json:"id"`
	Name    string    `json:"name"`
	Deleted bool      `json:"deleted"`
}

type SaaOpts struct {
	InstrumentID                uuid.UUID  `json:"instrument_id"`
	NumSegments                 int32      `json:"num_segments"`
	BottomElevationTimeseriesID *uuid.UUID `json:"bottom_elevation_timeseries_id"`
	InitialTime                 *time.Time `json:"initial_time"`
}

type SaaSegment struct {
	InstrumentID       uuid.UUID  `json:"instrument_id"`
	ID                 int32      `json:"id"`
	LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"`
	XTimeseriesID      *uuid.UUID `json:"x_timeseries_id"`
	YTimeseriesID      *uuid.UUID `json:"y_timeseries_id"`
	ZTimeseriesID      *uuid.UUID `json:"z_timeseries_id"`
	TempTimeseriesID   *uuid.UUID `json:"temp_timeseries_id"`
}

type Status struct {
	ID          uuid.UUID `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description"`
}

type Submittal struct {
	ID                uuid.UUID  `json:"id"`
	AlertConfigID     *uuid.UUID `json:"alert_config_id"`
	SubmittalStatusID *uuid.UUID `json:"submittal_status_id"`
	CompletedAt       *time.Time `json:"completed_at"`
	CreatedAt         time.Time  `json:"created_at"`
	DueAt             time.Time  `json:"due_at"`
	MarkedAsMissing   bool       `json:"marked_as_missing"`
	WarningSent       bool       `json:"warning_sent"`
}

type SubmittalStatus struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type TelemetryGoes struct {
	ID       uuid.UUID `json:"id"`
	NesdisID string    `json:"nesdis_id"`
}

type TelemetryIridium struct {
	ID   uuid.UUID `json:"id"`
	Imei string    `json:"imei"`
}

type TelemetryType struct {
	ID   uuid.UUID `json:"id"`
	Slug string    `json:"slug"`
	Name string    `json:"name"`
}

type Timeseries struct {
	ID           uuid.UUID      `json:"id"`
	Slug         string         `json:"slug"`
	Name         string         `json:"name"`
	InstrumentID *uuid.UUID     `json:"instrument_id"`
	ParameterID  uuid.UUID      `json:"parameter_id"`
	UnitID       uuid.UUID      `json:"unit_id"`
	Type         TimeseriesType `json:"type"`
}

type TimeseriesCwms struct {
	TimeseriesID           uuid.UUID  `json:"timeseries_id"`
	CwmsTimeseriesID       string     `json:"cwms_timeseries_id"`
	CwmsOfficeID           string     `json:"cwms_office_id"`
	CwmsExtentEarliestTime time.Time  `json:"cwms_extent_earliest_time"`
	CwmsExtentLatestTime   *time.Time `json:"cwms_extent_latest_time"`
}

type TimeseriesMeasurement struct {
	Time         time.Time `json:"time"`
	Value        float64   `json:"value"`
	TimeseriesID uuid.UUID `json:"timeseries_id"`
}

type TimeseriesNotes struct {
	Masked       *bool     `json:"masked"`
	Validated    *bool     `json:"validated"`
	Annotation   *string   `json:"annotation"`
	TimeseriesID uuid.UUID `json:"timeseries_id"`
	Time         time.Time `json:"time"`
}

type Unit struct {
	ID           uuid.UUID  `json:"id"`
	Name         string     `json:"name"`
	Abbreviation string     `json:"abbreviation"`
	UnitFamilyID *uuid.UUID `json:"unit_family_id"`
	MeasureID    *uuid.UUID `json:"measure_id"`
}

type UnitFamily struct {
	ID   uuid.UUID `json:"id"`
	Name string    `json:"name"`
}

type UploaderConfig struct {
	ID                    uuid.UUID          `json:"id"`
	ProjectID             uuid.UUID          `json:"project_id"`
	Slug                  string             `json:"slug"`
	Name                  string             `json:"name"`
	Description           string             `json:"description"`
	CreatedAt             time.Time          `json:"created_at"`
	CreatedBy             uuid.UUID          `json:"created_by"`
	UpdatedAt             *time.Time         `json:"updated_at"`
	UpdatedBy             *uuid.UUID         `json:"updated_by"`
	Type                  UploaderConfigType `json:"type"`
	TzName                string             `json:"tz_name"`
	TimeField             string             `json:"time_field"`
	ValidatedFieldEnabled bool               `json:"validated_field_enabled"`
	ValidatedField        *string            `json:"validated_field"`
	MaskedFieldEnabled    bool               `json:"masked_field_enabled"`
	MaskedField           *string            `json:"masked_field"`
	CommentFieldEnabled   bool               `json:"comment_field_enabled"`
	CommentField          *string            `json:"comment_field"`
	ColumnOffset          int32              `json:"column_offset"`
	RowOffset             int32              `json:"row_offset"`
}

type UploaderConfigMapping struct {
	UploaderConfigID uuid.UUID  `json:"uploader_config_id"`
	FieldName        string     `json:"field_name"`
	TimeseriesID     *uuid.UUID `json:"timeseries_id"`
}

// Types prefixed with V carry denormalized/joined fields (usernames, nested
// slices, project names) beyond the base tables — presumably backed by
// database views; confirm against the schema.

type VAlert struct {
	ID            uuid.UUID          `json:"id"`
	AlertConfigID uuid.UUID          `json:"alert_config_id"`
	CreatedAt     time.Time          `json:"created_at"`
	ProjectID     uuid.UUID          `json:"project_id"`
	ProjectName   string             `json:"project_name"`
	Name          string             `json:"name"`
	Body          string             `json:"body"`
	Instruments   []InstrumentIDName `json:"instruments"`
}

type VAlertCheckEvaluationSubmittal struct {
	AlertConfigID uuid.UUID   `json:"alert_config_id"`
	SubmittalID   uuid.UUID   `json:"submittal_id"`
	Submittal     *VSubmittal `json:"submittal"`
	ShouldWarn    bool        `json:"should_warn"`
	ShouldAlert   bool        `json:"should_alert"`
	ShouldRemind  bool        `json:"should_remind"`
}

type VAlertCheckMeasurementSubmittal struct {
	AlertConfigID      uuid.UUID                                          `json:"alert_config_id"`
	SubmittalID        uuid.UUID                                          `json:"submittal_id"`
	Submittal          *VSubmittal                                        `json:"submittal"`
	ShouldWarn         bool                                               `json:"should_warn"`
	ShouldAlert        bool                                               `json:"should_alert"`
	ShouldRemind       bool                                               `json:"should_remind"`
	AffectedTimeseries []AlertCheckMeasurementSubmittalAffectedTimeseries `json:"affected_timeseries"`
}

type VAlertConfig struct {
	ID                      uuid.UUID                 `json:"id"`
	Name                    string                    `json:"name"`
	Body                    string                    `json:"body"`
	CreatedBy               *uuid.UUID                `json:"created_by"`
	CreatedByUsername       string                    `json:"created_by_username"`
	CreatedAt               time.Time                 `json:"created_at"`
	UpdatedBy               *uuid.UUID                `json:"updated_by"`
	UpdatedByUsername       *string                   `json:"updated_by_username"`
	UpdatedAt               *time.Time                `json:"updated_at"`
	ProjectID               uuid.UUID                 `json:"project_id"`
	ProjectName             string                    `json:"project_name"`
	AlertTypeID             uuid.UUID                 `json:"alert_type_id"`
	AlertType               string                    `json:"alert_type"`
	StartedAt               time.Time                 `json:"started_at"`
	ScheduleInterval        string                    `json:"schedule_interval"`
	MuteConsecutiveAlerts   bool                      `json:"mute_consecutive_alerts"`
	RemindInterval          string                    `json:"remind_interval"`
	WarningInterval         string                    `json:"warning_interval"`
	LastCheckedAt           *time.Time                `json:"last_checked_at"`
	LastRemindedAt          *time.Time                `json:"last_reminded_at"`
	CreateNextSubmittalFrom *time.Time                `json:"create_next_submittal_from"`
	Instruments             []InstrumentIDName        `json:"instruments"`
	AlertEmailSubscriptions []EmailAutocompleteResult `json:"alert_email_subscriptions"`
}

type VAwarePlatformParameterEnabled struct {
	InstrumentID      uuid.UUID  `json:"instrument_id"`
	AwareID           uuid.UUID  `json:"aware_id"`
	AwareParameterKey string     `json:"aware_parameter_key"`
	TimeseriesID      *uuid.UUID `json:"timeseries_id"`
}

type VCollectionGroupDetails struct {
	ID         uuid.UUID                          `json:"id"`
	ProjectID  uuid.UUID                          `json:"project_id"`
	Name       string                             `json:"name"`
	Slug       string                             `json:"slug"`
	CreatedBy  uuid.UUID                          `json:"created_by"`
	CreatedAt  time.Time                          `json:"created_at"`
	UpdatedBy  *uuid.UUID                         `json:"updated_by"`
	UpdatedAt  *time.Time                         `json:"updated_at"`
	SortOrder  int32                              `json:"sort_order"`
	Timeseries []CollectionGroupDetailsTimeseries `json:"timeseries"`
}

type VDatalogger struct {
	ID                uuid.UUID               `json:"id"`
	Sn                string                  `json:"sn"`
	ProjectID         uuid.UUID               `json:"project_id"`
	CreatedBy         uuid.UUID               `json:"created_by"`
	CreatedByUsername string                  `json:"created_by_username"`
	CreatedAt         time.Time               `json:"created_at"`
	UpdatedBy         uuid.UUID               `json:"updated_by"`
	UpdatedByUsername string                  `json:"updated_by_username"`
	UpdatedAt         *time.Time              `json:"updated_at"`
	Name              string                  `json:"name"`
	Slug              string                  `json:"slug"`
	ModelID           uuid.UUID               `json:"model_id"`
	Model             *string                 `json:"model"`
	Errors            []string                `json:"errors"`
	Tables            []DataloggerTableIDName `json:"tables"`
}

type VDataloggerEquivalencyTable struct {
	DataloggerID        uuid.UUID                         `json:"datalogger_id"`
	DataloggerTableID   uuid.UUID                         `json:"datalogger_table_id"`
	DataloggerTableName string                            `json:"datalogger_table_name"`
	Fields              []DataloggerEquivalencyTableField `json:"fields"`
}

type VDataloggerHash struct {
	DataloggerID uuid.UUID `json:"datalogger_id"`
	Hash         string    `json:"hash"`
	Model        *string   `json:"model"`
	Sn           string    `json:"sn"`
}

type VDataloggerPreview struct {
	DataloggerTableID uuid.UUID       `json:"datalogger_table_id"`
	Preview           json.RawMessage `json:"preview"`
	UpdatedAt         time.Time       `json:"updated_at"`
}

type VDistrict struct {
	Agency           string     `json:"agency"`
	ID               uuid.UUID  `json:"id"`
	Name             *string    `json:"name"`
	Initials         *string    `json:"initials"`
	DivisionName     *string    `json:"division_name"`
	DivisionInitials *string    `json:"division_initials"`
	OfficeID         *uuid.UUID `json:"office_id"`
}

type VDistrictRollup struct {
	AlertTypeID             uuid.UUID  `json:"alert_type_id"`
	OfficeID                *uuid.UUID `json:"office_id"`
	DistrictInitials        *string    `json:"district_initials"`
	ProjectName             string     `json:"project_name"`
	ProjectID               uuid.UUID  `json:"project_id"`
	Month                   time.Time  `json:"month"`
	ExpectedTotalSubmittals int64      `json:"expected_total_submittals"`
	ActualTotalSubmittals   int64      `json:"actual_total_submittals"`
	RedSubmittals           int64      `json:"red_submittals"`
	YellowSubmittals        int64      `json:"yellow_submittals"`
	GreenSubmittals         int64      `json:"green_submittals"`
}

type VDomain struct {
	ID          uuid.UUID `json:"id"`
	Group       string    `json:"group"`
	Value       string    `json:"value"`
	Description *string   `json:"description"`
}

type VDomainGroup struct {
	Group string           `json:"group"`
	Opts  []DomainGroupOpt `json:"opts"`
}

type VEmailAutocomplete struct {
	ID            uuid.UUID   `json:"id"`
	UserType      string      `json:"user_type"`
	Username      interface{} `json:"username"`
	Email         string      `json:"email"`
	UsernameEmail string      `json:"username_email"`
}

type VEvaluation struct {
	ID                uuid.UUID          `json:"id"`
	Name              string             `json:"name"`
	Body              string             `json:"body"`
	CreatedBy         *uuid.UUID         `json:"created_by"`
	CreatedByUsername string             `json:"created_by_username"`
	CreatedAt         time.Time          `json:"created_at"`
	UpdatedBy         *uuid.UUID         `json:"updated_by"`
	UpdatedByUsername *string            `json:"updated_by_username"`
	UpdatedAt         *time.Time         `json:"updated_at"`
	ProjectID         uuid.UUID          `json:"project_id"`
	ProjectName       string             `json:"project_name"`
	AlertConfigID     *uuid.UUID         `json:"alert_config_id"`
	AlertConfigName   *string            `json:"alert_config_name"`
	SubmittalID       *uuid.UUID         `json:"submittal_id"`
	StartedAt         time.Time          `json:"started_at"`
	EndedAt           time.Time          `json:"ended_at"`
	Instruments       []InstrumentIDName `json:"instruments"`
}

type VInclMeasurement struct {
	InstrumentID uuid.UUID   `json:"instrument_id"`
	Time         time.Time   `json:"time"`
	Measurements interface{} `json:"measurements"`
}

type VInclSegment struct {
	ID                int32      `json:"id"`
	InstrumentID      uuid.UUID  `json:"instrument_id"`
	DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id"`
	A0TimeseriesID    *uuid.UUID `json:"a0_timeseries_id"`
	A180TimeseriesID  *uuid.UUID `json:"a180_timeseries_id"`
	B0TimeseriesID    *uuid.UUID `json:"b0_timeseries_id"`
	B180TimeseriesID  *uuid.UUID `json:"b180_timeseries_id"`
}

type VInstrument struct {
	ID           uuid.UUID       `json:"id"`
	StatusID     uuid.UUID       `json:"status_id"`
	Status       string          `json:"status"`
	StatusTime   time.Time       `json:"status_time"`
	Slug         string          `json:"slug"`
	Name         string          `json:"name"`
	TypeID       uuid.UUID       `json:"type_id"`
	ShowCwmsTab  bool            `json:"show_cwms_tab"`
	Type         string          `json:"type"`
	Icon         *string         `json:"icon"`
	Geometry     json.RawMessage `json:"geometry"`
	Station      *int32          `json:"station"`
	Offset       *int32          `json:"offset"`
	CreatedBy    uuid.UUID       `json:"created_by"`
	CreatedAt    time.Time       `json:"created_at"`
	UpdatedBy    *uuid.UUID      `json:"updated_by"`
	UpdatedAt    *time.Time      `json:"updated_at"`
	NidID        *string         `json:"nid_id"`
	UsgsID       *string         `json:"usgs_id"`
	Telemetry    []IDSlugName    `json:"telemetry"`
	HasCwms      bool            `json:"has_cwms"`
	Projects     []IDSlugName    `json:"projects"`
	Constants    []uuid.UUID     `json:"constants"`
	Groups       []uuid.UUID     `json:"groups"`
	AlertConfigs []uuid.UUID     `json:"alert_configs"`
	Opts         interface{}     `json:"opts"`
}

type VInstrumentGroup struct {
	ID              uuid.UUID   `json:"id"`
	Slug            string      `json:"slug"`
	Name            string      `json:"name"`
	Description     *string     `json:"description"`
	CreatedBy       uuid.UUID   `json:"created_by"`
	CreatedAt       time.Time   `json:"created_at"`
	UpdatedBy       *uuid.UUID  `json:"updated_by"`
	UpdatedAt       *time.Time  `json:"updated_at"`
	ProjectID       *uuid.UUID  `json:"project_id"`
	InstrumentCount int64       `json:"instrument_count"`
	TimeseriesCount interface{} `json:"timeseries_count"`
}

type VInstrumentStatus struct {
	ID           uuid.UUID `json:"id"`
	InstrumentID uuid.UUID `json:"instrument_id"`
	StatusID     uuid.UUID `json:"status_id"`
	Status       string    `json:"status"`
	Time         time.Time `json:"time"`
}

type VInstrumentTelemetry struct {
	ID                uuid.UUID `json:"id"`
	InstrumentID      uuid.UUID `json:"instrument_id"`
	TelemetryTypeID   uuid.UUID `json:"telemetry_type_id"`
	TelemetryTypeSlug string    `json:"telemetry_type_slug"`
	TelemetryTypeName string    `json:"telemetry_type_name"`
}

type VIpiMeasurement struct {
	InstrumentID uuid.UUID        `json:"instrument_id"`
	Time         time.Time        `json:"time"`
	Measurements []IpiMeasurement `json:"measurements"`
}

type VIpiSegment struct {
	ID                 int32      `json:"id"`
	InstrumentID       uuid.UUID  `json:"instrument_id"`
	LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"`
	Length             float64    `json:"length"`
	TiltTimeseriesID   *uuid.UUID `json:"tilt_timeseries_id"`
	IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id"`
}

type VPlotConfiguration struct {
	ID               uuid.UUID    `json:"id"`
	Slug             string       `json:"slug"`
	Name             string       `json:"name"`
	ProjectID        uuid.UUID    `json:"project_id"`
	CreatedBy        uuid.UUID    `json:"created_by"`
	CreatedAt        time.Time    `json:"created_at"`
	UpdatedBy        *uuid.UUID   `json:"updated_by"`
	UpdatedAt        *time.Time   `json:"updated_at"`
	ShowMasked       bool         `json:"show_masked"`
	ShowNonvalidated bool         `json:"show_nonvalidated"`
	ShowComments     bool         `json:"show_comments"`
	AutoRange        bool         `json:"auto_range"`
	DateRange        string       `json:"date_range"`
	Threshold        int32        `json:"threshold"`
	ReportConfigs    []IDSlugName `json:"report_configs"`
	PlotType         PlotType     `json:"plot_type"`
	Display          interface{}  `json:"display"`
}

type VProfile struct {
	ID          uuid.UUID       `json:"id"`
	Edipi       int64           `json:"edipi"`
	Username    string          `json:"username"`
	DisplayName string          `json:"display_name"`
	Email       string          `json:"email"`
	IsAdmin     bool            `json:"is_admin"`
	Roles       []string        `json:"roles"`
	Tokens      []VProfileToken `json:"tokens"`
}

type VProfileProjectRoles struct {
	ID          uuid.UUID   `json:"id"`
	ProfileID   uuid.UUID   `json:"profile_id"`
	Edipi       int64       `json:"edipi"`
	Username    string      `json:"username"`
	DisplayName string      `json:"display_name"`
	Email       string      `json:"email"`
	IsAdmin     bool        `json:"is_admin"`
	ProjectID   uuid.UUID   `json:"project_id"`
	RoleID      uuid.UUID   `json:"role_id"`
	Role        string      `json:"role"`
	Rolename    interface{} `json:"rolename"`
}

type VProject struct {
	ID                   uuid.UUID   `json:"id"`
	FederalID            *string     `json:"federal_id"`
	Image                interface{} `json:"image"`
	DistrictID           *uuid.UUID  `json:"district_id"`
	OfficeID             *uuid.UUID  `json:"office_id"`
	Slug                 string      `json:"slug"`
	Name                 string      `json:"name"`
	CreatedBy            uuid.UUID   `json:"created_by"`
	CreatedByUsername    string      `json:"created_by_username"`
	CreatedAt            time.Time   `json:"created_at"`
	UpdatedBy            *uuid.UUID  `json:"updated_by"`
	UpdatedByUsername    *string     `json:"updated_by_username"`
	UpdatedAt            *time.Time  `json:"updated_at"`
	InstrumentCount      int64       `json:"instrument_count"`
	InstrumentGroupCount int64       `json:"instrument_group_count"`
}

type VReportConfig struct {
	ID                uuid.UUID                   `json:"id"`
	Slug              string                      `json:"slug"`
	Name              string                      `json:"name"`
	Description       string                      `json:"description"`
	ProjectID         uuid.UUID                   `json:"project_id"`
	ProjectName       string                      `json:"project_name"`
	DistrictName      *string                     `json:"district_name"`
	CreatedBy         uuid.UUID                   `json:"created_by"`
	CreatedByUsername string                      `json:"created_by_username"`
	CreatedAt         time.Time                   `json:"created_at"`
	UpdatedBy         *uuid.UUID                  `json:"updated_by"`
	UpdatedByUsername *string                     `json:"updated_by_username"`
	UpdatedAt         *time.Time                  `json:"updated_at"`
	PlotConfigs       []IDSlugName                `json:"plot_configs"`
	GlobalOverrides   ReportConfigGlobalOverrides `json:"global_overrides"`
}

type VSaaMeasurement struct {
	InstrumentID uuid.UUID        `json:"instrument_id"`
	Time         time.Time        `json:"time"`
	Measurements []SaaMeasurement `json:"measurements"`
}

type VSaaSegment struct {
	ID                 int32      `json:"id"`
	InstrumentID       uuid.UUID  `json:"instrument_id"`
	LengthTimeseriesID *uuid.UUID `json:"length_timeseries_id"`
	Length             float64    `json:"length"`
	XTimeseriesID      *uuid.UUID `json:"x_timeseries_id"`
	YTimeseriesID      *uuid.UUID `json:"y_timeseries_id"`
	ZTimeseriesID      *uuid.UUID `json:"z_timeseries_id"`
	TempTimeseriesID   *uuid.UUID `json:"temp_timeseries_id"`
}

type VSubmittal struct {
	ID                  uuid.UUID  `json:"id"`
	AlertConfigID       uuid.UUID  `json:"alert_config_id"`
	AlertConfigName     string     `json:"alert_config_name"`
	AlertTypeID         uuid.UUID  `json:"alert_type_id"`
	AlertTypeName       string     `json:"alert_type_name"`
	ProjectID           uuid.UUID  `json:"project_id"`
	SubmittalStatusID   uuid.UUID  `json:"submittal_status_id"`
	SubmittalStatusName string     `json:"submittal_status_name"`
	CompletedAt         *time.Time `json:"completed_at"`
	CreatedAt           time.Time  `json:"created_at"`
	DueAt               time.Time  `json:"due_at"`
	MarkedAsMissing     bool       `json:"marked_as_missing"`
	WarningSent         bool       `json:"warning_sent"`
}

type VTimeseries struct {
	ID             uuid.UUID      `json:"id"`
	Slug           string         `json:"slug"`
	Name           string         `json:"name"`
	Type           TimeseriesType `json:"type"`
	IsComputed     bool           `json:"is_computed"`
	Variable       interface{}    `json:"variable"`
	InstrumentID   uuid.UUID      `json:"instrument_id"`
	InstrumentSlug string         `json:"instrument_slug"`
	Instrument     string         `json:"instrument"`
	ParameterID    uuid.UUID      `json:"parameter_id"`
	Parameter      string         `json:"parameter"`
	UnitID         uuid.UUID      `json:"unit_id"`
	Unit           string         `json:"unit"`
}

type VTimeseriesComputed struct {
	ID           uuid.UUID      `json:"id"`
	Slug         string         `json:"slug"`
	Name         string         `json:"name"`
	InstrumentID *uuid.UUID     `json:"instrument_id"`
	ParameterID  uuid.UUID      `json:"parameter_id"`
	UnitID       uuid.UUID      `json:"unit_id"`
	Type         TimeseriesType `json:"type"`
	Contents     *string        `json:"contents"`
}

type VTimeseriesCwms struct {
	ID uuid.UUID `json:"id"`
	Slug string `json:"slug"`
	Name string `json:"name"`
	Type TimeseriesType `json:"type"`
	IsComputed bool `json:"is_computed"`
	Variable interface{}
`json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentSlug string `json:"instrument_slug"` + Instrument string `json:"instrument"` + ParameterID uuid.UUID `json:"parameter_id"` + Parameter string `json:"parameter"` + UnitID uuid.UUID `json:"unit_id"` + Unit string `json:"unit"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` +} + +type VTimeseriesDependency struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParsedVariable interface{} `json:"parsed_variable"` + DependencyTimeseriesID *uuid.UUID `json:"dependency_timeseries_id"` +} + +type VTimeseriesMeasurement struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Time time.Time `json:"time"` + Value float64 `json:"value"` + Masked *bool `json:"masked"` + Validated *bool `json:"validated"` + Annotation *string `json:"annotation"` +} + +type VTimeseriesProjectMap struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + ProjectID *uuid.UUID `json:"project_id"` +} + +type VTimeseriesStored struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` +} + +type VUnit struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Abbreviation string `json:"abbreviation"` + UnitFamilyID *uuid.UUID `json:"unit_family_id"` + UnitFamily string `json:"unit_family"` + MeasureID *uuid.UUID `json:"measure_id"` + Measure string `json:"measure"` +} + +type VUploaderConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + CreatedAt time.Time 
`json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername string `json:"created_by_username"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedByUsername *string `json:"updated_by_username"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` +} diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go new file mode 100644 index 00000000..fde5d3d9 --- /dev/null +++ b/api/internal/db/overrides.go @@ -0,0 +1,129 @@ +package db + +// database overrides for scanning json into nested structs +// see sqlc.yml overrides + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type Opts map[string]interface{} + +func (o *Opts) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + +type AlertCheckMeasurementSubmittalAffectedTimeseries struct { + InstrumentName string `json:"instrument_name"` + TimeseriesName string `json:"timeseries_name"` + Status string `json:"status"` +} + +type DataloggerEquivalencyTableField struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type DataloggerTableIDName struct { + ID uuid.UUID `json:"id"` + TableName string `json:"table_name"` +} + +type DomainGroupOpt struct { + ID uuid.UUID `json:"id" db:"id"` + Value string 
`json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type CollectionGroupDetailsTimeseries struct { + VTimeseries + LatestTime *time.Time `json:"latest_time" db:"latest_time"` + LatestValue *float32 `json:"latest_value" db:"latest_value"` + SortOrder int32 `json:"sort_order" db:"sort_order"` +} + +type EmailAutocompleteResult struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type"` + Username *string `json:"username"` + Email string `json:"email"` +} + +type IDSlugName struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` +} + +type InstrumentIDName struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} + +type IpiMeasurement struct { + SegmentID int `json:"segment_id"` + Tilt *float64 `json:"tilt"` + IncDev *float64 `json:"inc_dev"` + CumDev *float64 `json:"cum_dev"` + Temp *float64 `json:"temp"` + Elelvation *float64 `json:"elevation"` +} + +type Measurement struct { + TimeseriesID uuid.UUID `json:"-"` + Time time.Time `json:"time"` + Value float64 `json:"value"` + Masked *bool `json:"masked,omitempty"` + Validated *bool `json:"validated,omitempty"` + Annotation *string `json:"annotation,omitempty"` + Error string `json:"error,omitempty"` +} + +type VProfileToken struct { + TokenID string `json:"token_id"` + Issued time.Time `json:"issued"` +} + +type ReportConfigGlobalOverrides struct { + DateRange TextOption `json:"date_range"` + ShowMasked ToggleOption `json:"show_masked"` + ShowNonvalidated ToggleOption `json:"show_nonvalidated"` +} + +type SaaMeasurement struct { + SegmentID int `json:"segment_id"` + X *float64 `json:"x"` + Y *float64 `json:"y"` + Z *float64 `json:"z"` + Temp *float64 `json:"temp"` + XIncrement *float64 `json:"x_increment"` + YIncrement *float64 `json:"y_increment"` + ZIncrement *float64 `json:"z_increment"` + TempIncrement *float64 `json:"temp_increment"` + XCumDev *float64 `json:"x_cum_dev"` + 
YCumDev *float64 `json:"y_cum_dev"` + ZCumDev *float64 `json:"z_cum_dev"` + TempCumDev *float64 `json:"temp_cum_dev"` + Elevation *float64 `json:"elevation"` +} + +type TextOption struct { + Enabled bool `json:"enabled"` + Value string `json:"value"` +} + +type ToggleOption struct { + Enabled bool `json:"enabled"` + Value bool `json:"value"` +} diff --git a/api/internal/db/plot_config.sql_gen.go b/api/internal/db/plot_config.sql_gen.go new file mode 100644 index 00000000..176d79a5 --- /dev/null +++ b/api/internal/db/plot_config.sql_gen.go @@ -0,0 +1,188 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: plot_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const plotConfigCreate = `-- name: PlotConfigCreate :one +insert into plot_configuration (slug, name, project_id, created_by, created_at, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +returning id +` + +type PlotConfigCreateParams struct { + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + PlotType PlotType `json:"plot_type"` +} + +func (q *Queries) PlotConfigCreate(ctx context.Context, arg PlotConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, plotConfigCreate, + arg.Name, + arg.ProjectID, + arg.CreatedBy, + arg.CreatedAt, + arg.PlotType, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const plotConfigDelete = `-- name: PlotConfigDelete :exec +delete from plot_configuration where project_id = $1 and id = $2 +` + +type PlotConfigDeleteParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) PlotConfigDelete(ctx context.Context, arg PlotConfigDeleteParams) error { + _, err := q.db.Exec(ctx, plotConfigDelete, arg.ProjectID, arg.ID) + return err +} + +const plotConfigGet = `-- name: PlotConfigGet :one +select id, 
slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display +from v_plot_configuration +where id = $1 +` + +func (q *Queries) PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) { + row := q.db.QueryRow(ctx, plotConfigGet, id) + var i VPlotConfiguration + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.ReportConfigs, + &i.PlotType, + &i.Display, + ) + return i, err +} + +const plotConfigListForProject = `-- name: PlotConfigListForProject :many +select id, slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display +from v_plot_configuration +where project_id = $1 +` + +func (q *Queries) PlotConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, plotConfigListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VPlotConfiguration{} + for rows.Next() { + var i VPlotConfiguration + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.ReportConfigs, + &i.PlotType, + &i.Display, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const plotConfigSettingsCreate = `-- name: PlotConfigSettingsCreate :exec +insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, 
auto_range, date_range, threshold) +values ($1, $2, $3, $4, $5, $6, $7) +` + +type PlotConfigSettingsCreateParams struct { + ID uuid.UUID `json:"id"` + ShowMasked bool `json:"show_masked"` + ShowNonvalidated bool `json:"show_nonvalidated"` + ShowComments bool `json:"show_comments"` + AutoRange bool `json:"auto_range"` + DateRange string `json:"date_range"` + Threshold int32 `json:"threshold"` +} + +func (q *Queries) PlotConfigSettingsCreate(ctx context.Context, arg PlotConfigSettingsCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigSettingsCreate, + arg.ID, + arg.ShowMasked, + arg.ShowNonvalidated, + arg.ShowComments, + arg.AutoRange, + arg.DateRange, + arg.Threshold, + ) + return err +} + +const plotConfigSettingsDelete = `-- name: PlotConfigSettingsDelete :exec +delete from plot_configuration_settings where id = $1 +` + +func (q *Queries) PlotConfigSettingsDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigSettingsDelete, id) + return err +} + +const plotConfigUpdate = `-- name: PlotConfigUpdate :exec +update plot_configuration set name = $3, updated_by = $4, updated_at = $5 where project_id = $1 and id = $2 +` + +type PlotConfigUpdateParams struct { + ProjectID uuid.UUID `json:"project_id"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigUpdate, + arg.ProjectID, + arg.ID, + arg.Name, + arg.UpdatedBy, + arg.UpdatedAt, + ) + return err +} diff --git a/api/internal/db/plot_config_bullseye.sql_gen.go b/api/internal/db/plot_config_bullseye.sql_gen.go new file mode 100644 index 00000000..df10849e --- /dev/null +++ b/api/internal/db/plot_config_bullseye.sql_gen.go @@ -0,0 +1,106 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_bullseye.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const plotBullseyeConfigCreate = `-- name: PlotBullseyeConfigCreate :exec +insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3) +` + +type PlotBullseyeConfigCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` +} + +func (q *Queries) PlotBullseyeConfigCreate(ctx context.Context, arg PlotBullseyeConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigCreate, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) + return err +} + +const plotBullseyeConfigDelete = `-- name: PlotBullseyeConfigDelete :exec +delete from plot_bullseye_config where plot_config_id = $1 +` + +func (q *Queries) PlotBullseyeConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigDelete, plotConfigID) + return err +} + +const plotBullseyeConfigUpdate = `-- name: PlotBullseyeConfigUpdate :exec +UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 +` + +type PlotBullseyeConfigUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + XAxisTimeseriesID *uuid.UUID `json:"x_axis_timeseries_id"` + YAxisTimeseriesID *uuid.UUID `json:"y_axis_timeseries_id"` +} + +func (q *Queries) PlotBullseyeConfigUpdate(ctx context.Context, arg PlotBullseyeConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotBullseyeConfigUpdate, arg.PlotConfigID, arg.XAxisTimeseriesID, arg.YAxisTimeseriesID) + return err +} + +const plotConfigMeasurementListBullseye = `-- name: PlotConfigMeasurementListBullseye :many +select + t.time, + locf(xm.value) as x, + locf(ym.value) as y +from plot_bullseye_config pc +inner join 
timeseries_measurement t +on t.timeseries_id = pc.x_axis_timeseries_id +or t.timeseries_id = pc.y_axis_timeseries_id +left join timeseries_measurement xm +on xm.timeseries_id = pc.x_axis_timeseries_id +and xm.time = t.time +left join timeseries_measurement ym +on ym.timeseries_id = pc.y_axis_timeseries_id +and ym.time = t.time +where pc.plot_config_id = $1 +and t.time > $2 +and t.time < $3 +group by t.time +order by t.time asc +` + +type PlotConfigMeasurementListBullseyeParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +type PlotConfigMeasurementListBullseyeRow struct { + Time time.Time `json:"time"` + X interface{} `json:"x"` + Y interface{} `json:"y"` +} + +func (q *Queries) PlotConfigMeasurementListBullseye(ctx context.Context, arg PlotConfigMeasurementListBullseyeParams) ([]PlotConfigMeasurementListBullseyeRow, error) { + rows, err := q.db.Query(ctx, plotConfigMeasurementListBullseye, arg.PlotConfigID, arg.After, arg.Before) + if err != nil { + return nil, err + } + defer rows.Close() + items := []PlotConfigMeasurementListBullseyeRow{} + for rows.Next() { + var i PlotConfigMeasurementListBullseyeRow + if err := rows.Scan(&i.Time, &i.X, &i.Y); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/plot_config_contour.sql_gen.go b/api/internal/db/plot_config_contour.sql_gen.go new file mode 100644 index 00000000..f5180365 --- /dev/null +++ b/api/internal/db/plot_config_contour.sql_gen.go @@ -0,0 +1,186 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_contour.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const plotConfigMeasurementListContour = `-- name: PlotConfigMeasurementListContour :many +select + oi.x::double precision x, + oi.y::double precision y, + locf(mm.value) z +from plot_contour_config pc +left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id +left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +inner join timeseries ts on ts.id = pcts.timeseries_id +inner join ( + select + ii.id, + st_x(st_centroid(ii.geometry)) as x, + st_y(st_centroid(ii.geometry)) as y + from instrument ii +) oi on oi.id = ts.instrument_id +where plot_config_id = $1 +and mm.time = $2 +group by pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y +` + +type PlotConfigMeasurementListContourParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time time.Time `json:"time"` +} + +type PlotConfigMeasurementListContourRow struct { + X float64 `json:"x"` + Y float64 `json:"y"` + Z interface{} `json:"z"` +} + +func (q *Queries) PlotConfigMeasurementListContour(ctx context.Context, arg PlotConfigMeasurementListContourParams) ([]PlotConfigMeasurementListContourRow, error) { + rows, err := q.db.Query(ctx, plotConfigMeasurementListContour, arg.PlotConfigID, arg.Time) + if err != nil { + return nil, err + } + defer rows.Close() + items := []PlotConfigMeasurementListContourRow{} + for rows.Next() { + var i PlotConfigMeasurementListContourRow + if err := rows.Scan(&i.X, &i.Y, &i.Z); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const plotContourConfigCreate = `-- name: PlotContourConfigCreate :exec +insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) +values ($1, $2, $3, $4, $5, $6) +` + +type 
PlotContourConfigCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +func (q *Queries) PlotContourConfigCreate(ctx context.Context, arg PlotContourConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigCreate, + arg.PlotConfigID, + arg.Time, + arg.LocfBackfill, + arg.GradientSmoothing, + arg.ContourSmoothing, + arg.ShowLabels, + ) + return err +} + +const plotContourConfigDelete = `-- name: PlotContourConfigDelete :exec +delete from plot_contour_config where plot_config_id = $1 +` + +func (q *Queries) PlotContourConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotContourConfigDelete, plotConfigID) + return err +} + +const plotContourConfigListTimeRange = `-- name: PlotContourConfigListTimeRange :many +select distinct mm.time +from plot_contour_config_timeseries pcts +inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +where pcts.plot_contour_config_id = $1 +and mm.time > $2 +and mm.time < $3 +order by time asc +` + +type PlotContourConfigListTimeRangeParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + After time.Time `json:"after"` + Before time.Time `json:"before"` +} + +func (q *Queries) PlotContourConfigListTimeRange(ctx context.Context, arg PlotContourConfigListTimeRangeParams) ([]time.Time, error) { + rows, err := q.db.Query(ctx, plotContourConfigListTimeRange, arg.PlotContourConfigID, arg.After, arg.Before) + if err != nil { + return nil, err + } + defer rows.Close() + items := []time.Time{} + for rows.Next() { + var time time.Time + if err := rows.Scan(&time); err != nil { + return nil, err + } + items = append(items, time) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, 
nil +} + +const plotContourConfigTimeseriesCreate = `-- name: PlotContourConfigTimeseriesCreate :exec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing +` + +type PlotContourConfigTimeseriesCreateParams struct { + PlotContourConfigID uuid.UUID `json:"plot_contour_config_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) PlotContourConfigTimeseriesCreate(ctx context.Context, arg PlotContourConfigTimeseriesCreateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigTimeseriesCreate, arg.PlotContourConfigID, arg.TimeseriesID) + return err +} + +const plotContourConfigTimeseriesDeleteForPlotContourConfig = `-- name: PlotContourConfigTimeseriesDeleteForPlotContourConfig :exec +delete from plot_contour_config_timeseries where plot_contour_config_id = $1 +` + +func (q *Queries) PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx context.Context, plotContourConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, plotContourConfigTimeseriesDeleteForPlotContourConfig, plotContourConfigID) + return err +} + +const plotContourConfigUpdate = `-- name: PlotContourConfigUpdate :exec +update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 +where plot_config_id=$1 +` + +type PlotContourConfigUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + Time *time.Time `json:"time"` + LocfBackfill string `json:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing"` + ShowLabels bool `json:"show_labels"` +} + +func (q *Queries) PlotContourConfigUpdate(ctx context.Context, arg PlotContourConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotContourConfigUpdate, + arg.PlotConfigID, + arg.Time, + arg.LocfBackfill, + arg.GradientSmoothing, + arg.ContourSmoothing, + arg.ShowLabels, + ) + return 
err +} diff --git a/api/internal/db/plot_config_profile.sql_gen.go b/api/internal/db/plot_config_profile.sql_gen.go new file mode 100644 index 00000000..39b9708c --- /dev/null +++ b/api/internal/db/plot_config_profile.sql_gen.go @@ -0,0 +1,40 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: plot_config_profile.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const plotProfileConfigCreate = `-- name: PlotProfileConfigCreate :exec +insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2) +` + +type PlotProfileConfigCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) PlotProfileConfigCreate(ctx context.Context, arg PlotProfileConfigCreateParams) error { + _, err := q.db.Exec(ctx, plotProfileConfigCreate, arg.PlotConfigID, arg.InstrumentID) + return err +} + +const plotProfileConfigUpdate = `-- name: PlotProfileConfigUpdate :exec +update plot_profile_config set instrument_id=$2 where plot_config_id=$1 +` + +type PlotProfileConfigUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + InstrumentID uuid.UUID `json:"instrument_id"` +} + +func (q *Queries) PlotProfileConfigUpdate(ctx context.Context, arg PlotProfileConfigUpdateParams) error { + _, err := q.db.Exec(ctx, plotProfileConfigUpdate, arg.PlotConfigID, arg.InstrumentID) + return err +} diff --git a/api/internal/db/plot_config_scatter_line.sql_gen.go b/api/internal/db/plot_config_scatter_line.sql_gen.go new file mode 100644 index 00000000..0398539a --- /dev/null +++ b/api/internal/db/plot_config_scatter_line.sql_gen.go @@ -0,0 +1,170 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: plot_config_scatter_line.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const plotConfigCustomShapeCreate = `-- name: PlotConfigCustomShapeCreate :exec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5) +` + +type PlotConfigCustomShapeCreateParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) PlotConfigCustomShapeCreate(ctx context.Context, arg PlotConfigCustomShapeCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeCreate, + arg.PlotConfigurationID, + arg.Enabled, + arg.Name, + arg.DataPoint, + arg.Color, + ) + return err +} + +const plotConfigCustomShapeDeleteForPlotConfig = `-- name: PlotConfigCustomShapeDeleteForPlotConfig :exec +delete from plot_configuration_custom_shape where plot_configuration_id=$1 +` + +func (q *Queries) PlotConfigCustomShapeDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeDeleteForPlotConfig, plotConfigurationID) + return err +} + +const plotConfigCustomShapeUpdate = `-- name: PlotConfigCustomShapeUpdate :exec +update plot_configuration_custom_shape +set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1 +` + +type PlotConfigCustomShapeUpdateParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} + +func (q *Queries) PlotConfigCustomShapeUpdate(ctx context.Context, arg PlotConfigCustomShapeUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigCustomShapeUpdate, + arg.PlotConfigurationID, + arg.Enabled, + arg.Name, + 
arg.DataPoint, + arg.Color, + ) + return err +} + +const plotConfigScatterLineLayoutCreate = `-- name: PlotConfigScatterLineLayoutCreate :exec +insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3) +` + +type PlotConfigScatterLineLayoutCreateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) PlotConfigScatterLineLayoutCreate(ctx context.Context, arg PlotConfigScatterLineLayoutCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigScatterLineLayoutCreate, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) + return err +} + +const plotConfigScatterLineLayoutUpdate = `-- name: PlotConfigScatterLineLayoutUpdate :exec +update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1 +` + +type PlotConfigScatterLineLayoutUpdateParams struct { + PlotConfigID uuid.UUID `json:"plot_config_id"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +func (q *Queries) PlotConfigScatterLineLayoutUpdate(ctx context.Context, arg PlotConfigScatterLineLayoutUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigScatterLineLayoutUpdate, arg.PlotConfigID, arg.YAxisTitle, arg.Y2AxisTitle) + return err +} + +const plotConfigTimeseriesTraceCreate = `-- name: PlotConfigTimeseriesTraceCreate :exec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8) +` + +type PlotConfigTimeseriesTraceCreateParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis 
YAxis `json:"y_axis"` +} + +func (q *Queries) PlotConfigTimeseriesTraceCreate(ctx context.Context, arg PlotConfigTimeseriesTraceCreateParams) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceCreate, + arg.PlotConfigurationID, + arg.TimeseriesID, + arg.TraceOrder, + arg.Color, + arg.LineStyle, + arg.Width, + arg.ShowMarkers, + arg.YAxis, + ) + return err +} + +const plotConfigTimeseriesTraceDeleteForPlotConfig = `-- name: PlotConfigTimeseriesTraceDeleteForPlotConfig :exec +delete from plot_configuration_timeseries_trace where plot_configuration_id=$1 +` + +func (q *Queries) PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceDeleteForPlotConfig, plotConfigurationID) + return err +} + +const plotConfigTimeseriesTraceUpdate = `-- name: PlotConfigTimeseriesTraceUpdate :exec +update plot_configuration_timeseries_trace +set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 +where plot_configuration_id=$1 and timeseries_id=$2 +` + +type PlotConfigTimeseriesTraceUpdateParams struct { + PlotConfigurationID *uuid.UUID `json:"plot_configuration_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` + TraceOrder int32 `json:"trace_order"` + Color string `json:"color"` + LineStyle LineStyle `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis YAxis `json:"y_axis"` +} + +func (q *Queries) PlotConfigTimeseriesTraceUpdate(ctx context.Context, arg PlotConfigTimeseriesTraceUpdateParams) error { + _, err := q.db.Exec(ctx, plotConfigTimeseriesTraceUpdate, + arg.PlotConfigurationID, + arg.TimeseriesID, + arg.TraceOrder, + arg.Color, + arg.LineStyle, + arg.Width, + arg.ShowMarkers, + arg.YAxis, + ) + return err +} diff --git a/api/internal/db/profile.sql_gen.go b/api/internal/db/profile.sql_gen.go new file mode 100644 index 00000000..ef522030 --- /dev/null +++ b/api/internal/db/profile.sql_gen.go @@ 
-0,0 +1,273 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: profile.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const profileCreate = `-- name: ProfileCreate :one +insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name +` + +type ProfileCreateParams struct { + Edipi int64 `json:"edipi"` + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` +} + +type ProfileCreateRow struct { + ID uuid.UUID `json:"id"` + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` +} + +func (q *Queries) ProfileCreate(ctx context.Context, arg ProfileCreateParams) (ProfileCreateRow, error) { + row := q.db.QueryRow(ctx, profileCreate, + arg.Edipi, + arg.Username, + arg.Email, + arg.DisplayName, + ) + var i ProfileCreateRow + err := row.Scan( + &i.ID, + &i.Username, + &i.Email, + &i.DisplayName, + ) + return i, err +} + +const profileGetForEDIPI = `-- name: ProfileGetForEDIPI :one +select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where edipi = $1 +` + +func (q *Queries) ProfileGetForEDIPI(ctx context.Context, edipi int64) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForEDIPI, edipi) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + &i.Tokens, + ) + return i, err +} + +const profileGetForEmail = `-- name: ProfileGetForEmail :one +select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where email ilike $1 +limit 1 +` + +func (q *Queries) ProfileGetForEmail(ctx context.Context, email string) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForEmail, email) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + 
&i.IsAdmin, + &i.Roles, + &i.Tokens, + ) + return i, err +} + +const profileGetForToken = `-- name: ProfileGetForToken :one +select p.id, p.edipi, p.username, p.email, p.is_admin +from profile_token t +left join v_profile p on p.id = t.profile_id +where t.token_id = $1 +limit 1 +` + +type ProfileGetForTokenRow struct { + ID *uuid.UUID `json:"id"` + Edipi *int64 `json:"edipi"` + Username *string `json:"username"` + Email *string `json:"email"` + IsAdmin *bool `json:"is_admin"` +} + +func (q *Queries) ProfileGetForToken(ctx context.Context, tokenID string) (ProfileGetForTokenRow, error) { + row := q.db.QueryRow(ctx, profileGetForToken, tokenID) + var i ProfileGetForTokenRow + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.Email, + &i.IsAdmin, + ) + return i, err +} + +const profileGetForUsername = `-- name: ProfileGetForUsername :one +select id, edipi, username, display_name, email, is_admin, roles, tokens from v_profile where username = $1 +limit 1 +` + +func (q *Queries) ProfileGetForUsername(ctx context.Context, username string) (VProfile, error) { + row := q.db.QueryRow(ctx, profileGetForUsername, username) + var i VProfile + err := row.Scan( + &i.ID, + &i.Edipi, + &i.Username, + &i.DisplayName, + &i.Email, + &i.IsAdmin, + &i.Roles, + &i.Tokens, + ) + return i, err +} + +const profileTokenCreate = `-- name: ProfileTokenCreate :one +insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning id, token_id, profile_id, issued, hash +` + +type ProfileTokenCreateParams struct { + TokenID string `json:"token_id"` + ProfileID uuid.UUID `json:"profile_id"` + Hash string `json:"hash"` +} + +func (q *Queries) ProfileTokenCreate(ctx context.Context, arg ProfileTokenCreateParams) (ProfileToken, error) { + row := q.db.QueryRow(ctx, profileTokenCreate, arg.TokenID, arg.ProfileID, arg.Hash) + var i ProfileToken + err := row.Scan( + &i.ID, + &i.TokenID, + &i.ProfileID, + &i.Issued, + &i.Hash, + ) + return i, err +} + +const profileTokenDelete = 
`-- name: ProfileTokenDelete :exec +delete from profile_token where profile_id=$1 and token_id=$2 +` + +type ProfileTokenDeleteParams struct { + ProfileID uuid.UUID `json:"profile_id"` + TokenID string `json:"token_id"` +} + +func (q *Queries) ProfileTokenDelete(ctx context.Context, arg ProfileTokenDeleteParams) error { + _, err := q.db.Exec(ctx, profileTokenDelete, arg.ProfileID, arg.TokenID) + return err +} + +const profileTokenGet = `-- name: ProfileTokenGet :one +select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1 +` + +func (q *Queries) ProfileTokenGet(ctx context.Context, tokenID string) (ProfileToken, error) { + row := q.db.QueryRow(ctx, profileTokenGet, tokenID) + var i ProfileToken + err := row.Scan( + &i.ID, + &i.TokenID, + &i.ProfileID, + &i.Issued, + &i.Hash, + ) + return i, err +} + +const profileTokenList = `-- name: ProfileTokenList :many +select token_id, issued from profile_token where profile_id = $1 +` + +type ProfileTokenListRow struct { + TokenID string `json:"token_id"` + Issued time.Time `json:"issued"` +} + +func (q *Queries) ProfileTokenList(ctx context.Context, profileID uuid.UUID) ([]ProfileTokenListRow, error) { + rows, err := q.db.Query(ctx, profileTokenList, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProfileTokenListRow{} + for rows.Next() { + var i ProfileTokenListRow + if err := rows.Scan(&i.TokenID, &i.Issued); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const profileUpdateForEDIPI = `-- name: ProfileUpdateForEDIPI :exec +UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4 +` + +type ProfileUpdateForEDIPIParams struct { + Username string `json:"username"` + Email string `json:"email"` + DisplayName string `json:"display_name"` + Edipi int64 `json:"edipi"` +} + +func (q *Queries) ProfileUpdateForEDIPI(ctx context.Context, arg 
ProfileUpdateForEDIPIParams) error { + _, err := q.db.Exec(ctx, profileUpdateForEDIPI, + arg.Username, + arg.Email, + arg.DisplayName, + arg.Edipi, + ) + return err +} + +const profileUpdateForEmail = `-- name: ProfileUpdateForEmail :exec +update profile set username=$1, display_name=$2 where email ilike $3 +` + +type ProfileUpdateForEmailParams struct { + Username string `json:"username"` + DisplayName string `json:"display_name"` + Email string `json:"email"` +} + +func (q *Queries) ProfileUpdateForEmail(ctx context.Context, arg ProfileUpdateForEmailParams) error { + _, err := q.db.Exec(ctx, profileUpdateForEmail, arg.Username, arg.DisplayName, arg.Email) + return err +} + +const profileUpdateForUsername = `-- name: ProfileUpdateForUsername :exec +update profile set email=$1, display_name=$2 where username=$3 +` + +type ProfileUpdateForUsernameParams struct { + Email string `json:"email"` + DisplayName string `json:"display_name"` + Username string `json:"username"` +} + +func (q *Queries) ProfileUpdateForUsername(ctx context.Context, arg ProfileUpdateForUsernameParams) error { + _, err := q.db.Exec(ctx, profileUpdateForUsername, arg.Email, arg.DisplayName, arg.Username) + return err +} diff --git a/api/internal/db/project.sql_gen.go b/api/internal/db/project.sql_gen.go new file mode 100644 index 00000000..98596096 --- /dev/null +++ b/api/internal/db/project.sql_gen.go @@ -0,0 +1,338 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: project.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const districtList = `-- name: DistrictList :many +select agency, id, name, initials, division_name, division_initials, office_id from v_district +` + +func (q *Queries) DistrictList(ctx context.Context) ([]VDistrict, error) { + rows, err := q.db.Query(ctx, districtList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VDistrict{} + for rows.Next() { + var i VDistrict + if err := rows.Scan( + &i.Agency, + &i.ID, + &i.Name, + &i.Initials, + &i.DivisionName, + &i.DivisionInitials, + &i.OfficeID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectDeleteFlag = `-- name: ProjectDeleteFlag :exec +update project set deleted=true where id = $1 +` + +func (q *Queries) ProjectDeleteFlag(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, projectDeleteFlag, id) + return err +} + +const projectGet = `-- name: ProjectGet :one +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project where id = $1 +` + +func (q *Queries) ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error) { + row := q.db.QueryRow(ctx, projectGet, id) + var i VProject + err := row.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Slug, + &i.Name, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ) + return i, err +} + +const projectGetCount = `-- name: ProjectGetCount :one +select count(*) from project where not deleted +` + +func (q *Queries) ProjectGetCount(ctx context.Context) (int64, error) { + row := 
q.db.QueryRow(ctx, projectGetCount) + var count int64 + err := row.Scan(&count) + return count, err +} + +const projectList = `-- name: ProjectList :many +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project +` + +func (q *Queries) ProjectList(ctx context.Context) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Slug, + &i.Name, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectListForFederalID = `-- name: ProjectListForFederalID :many +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project +where federal_id = $1 +` + +func (q *Queries) ProjectListForFederalID(ctx context.Context, federalID *string) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForFederalID, federalID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Slug, + &i.Name, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + 
if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectListForNameSearch = `-- name: ProjectListForNameSearch :many +select id, federal_id, image, district_id, office_id, slug, name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, instrument_count, instrument_group_count from v_project +where name ilike '%'||$1||'%' +limit $2 +` + +type ProjectListForNameSearchParams struct { + Name *string `json:"name"` + ResultLimit int32 `json:"result_limit"` +} + +func (q *Queries) ProjectListForNameSearch(ctx context.Context, arg ProjectListForNameSearchParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForNameSearch, arg.Name, arg.ResultLimit) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Slug, + &i.Name, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectListForProfileAdmin = `-- name: ProjectListForProfileAdmin :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN' +` + +func (q *Queries) ProjectListForProfileAdmin(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { + rows, err := q.db.Query(ctx, projectListForProfileAdmin, profileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []uuid.UUID{} + for rows.Next() { + var project_id uuid.UUID + if err := rows.Scan(&project_id); err != nil { + return nil, err + } + items = append(items, project_id) + } + if err := rows.Err(); err != nil { + return nil, 
err + } + return items, nil +} + +const projectListForProfileRole = `-- name: ProjectListForProfileRole :many +select p.id, p.federal_id, p.image, p.district_id, p.office_id, p.slug, p.name, p.created_by, p.created_by_username, p.created_at, p.updated_by, p.updated_by_username, p.updated_at, p.instrument_count, p.instrument_group_count +from v_project p +inner join profile_project_roles pr on pr.project_id = p.id +inner join role r on r.id = pr.role_id +where pr.profile_id = $1 +and r.name = $2 +` + +type ProjectListForProfileRoleParams struct { + ProfileID uuid.UUID `json:"profile_id"` + Name string `json:"name"` +} + +func (q *Queries) ProjectListForProfileRole(ctx context.Context, arg ProjectListForProfileRoleParams) ([]VProject, error) { + rows, err := q.db.Query(ctx, projectListForProfileRole, arg.ProfileID, arg.Name) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VProject{} + for rows.Next() { + var i VProject + if err := rows.Scan( + &i.ID, + &i.FederalID, + &i.Image, + &i.DistrictID, + &i.OfficeID, + &i.Slug, + &i.Name, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.InstrumentCount, + &i.InstrumentGroupCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const projectUpdate = `-- name: ProjectUpdate :one +update project set name=$2, updated_by=$3, updated_at=$4, district_id=$5, federal_id=$6 where id=$1 returning id +` + +type ProjectUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + DistrictID *uuid.UUID `json:"district_id"` + FederalID *string `json:"federal_id"` +} + +func (q *Queries) ProjectUpdate(ctx context.Context, arg ProjectUpdateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, projectUpdate, + arg.ID, + arg.Name, + 
arg.UpdatedBy, + arg.UpdatedAt, + arg.DistrictID, + arg.FederalID, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const projectUpdateImage = `-- name: ProjectUpdateImage :exec +update project set image = $1 where id = $2 +` + +type ProjectUpdateImageParams struct { + Image *string `json:"image"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) ProjectUpdateImage(ctx context.Context, arg ProjectUpdateImageParams) error { + _, err := q.db.Exec(ctx, projectUpdateImage, arg.Image, arg.ID) + return err +} diff --git a/api/internal/db/project_role.sql_gen.go b/api/internal/db/project_role.sql_gen.go new file mode 100644 index 00000000..64f0989f --- /dev/null +++ b/api/internal/db/project_role.sql_gen.go @@ -0,0 +1,169 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: project_role.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const profileProjectRoleCreate = `-- name: ProfileProjectRoleCreate :one +insert into profile_project_roles (project_id, profile_id, role_id, granted_by) +values ($1, $2, $3, $4) +on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id +returning id +` + +type ProfileProjectRoleCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` + GrantedBy *uuid.UUID `json:"granted_by"` +} + +func (q *Queries) ProfileProjectRoleCreate(ctx context.Context, arg ProfileProjectRoleCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, profileProjectRoleCreate, + arg.ProjectID, + arg.ProfileID, + arg.RoleID, + arg.GrantedBy, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const profileProjectRoleDelete = `-- name: ProfileProjectRoleDelete :exec +delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3 +` + +type ProfileProjectRoleDeleteParams struct { + ProjectID uuid.UUID 
`json:"project_id"` + ProfileID uuid.UUID `json:"profile_id"` + RoleID uuid.UUID `json:"role_id"` +} + +func (q *Queries) ProfileProjectRoleDelete(ctx context.Context, arg ProfileProjectRoleDeleteParams) error { + _, err := q.db.Exec(ctx, profileProjectRoleDelete, arg.ProjectID, arg.ProfileID, arg.RoleID) + return err +} + +const profileProjectRoleGet = `-- name: ProfileProjectRoleGet :one +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where id = $1 +` + +type ProfileProjectRoleGetRow struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + Username string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id"` + Role string `json:"role"` +} + +func (q *Queries) ProfileProjectRoleGet(ctx context.Context, id uuid.UUID) (ProfileProjectRoleGetRow, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGet, id) + var i ProfileProjectRoleGetRow + err := row.Scan( + &i.ID, + &i.ProfileID, + &i.Username, + &i.Email, + &i.RoleID, + &i.Role, + ) + return i, err +} + +const profileProjectRoleGetIsAdmin = `-- name: ProfileProjectRoleGetIsAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and r.name = 'ADMIN' +) +` + +type ProfileProjectRoleGetIsAdminParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) ProfileProjectRoleGetIsAdmin(ctx context.Context, arg ProfileProjectRoleGetIsAdminParams) (bool, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGetIsAdmin, arg.ProfileID, arg.ProjectID) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const profileProjectRoleGetIsMemberOrAdmin = `-- name: ProfileProjectRoleGetIsMemberOrAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + 
and (r.name = 'MEMBER' or r.name = 'ADMIN') +) +` + +type ProfileProjectRoleGetIsMemberOrAdminParams struct { + ProfileID uuid.UUID `json:"profile_id"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) ProfileProjectRoleGetIsMemberOrAdmin(ctx context.Context, arg ProfileProjectRoleGetIsMemberOrAdminParams) (bool, error) { + row := q.db.QueryRow(ctx, profileProjectRoleGetIsMemberOrAdmin, arg.ProfileID, arg.ProjectID) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const profileProjectRoleListForProject = `-- name: ProfileProjectRoleListForProject :many +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where project_id = $1 +order by email +` + +type ProfileProjectRoleListForProjectRow struct { + ID uuid.UUID `json:"id"` + ProfileID uuid.UUID `json:"profile_id"` + Username string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id"` + Role string `json:"role"` +} + +func (q *Queries) ProfileProjectRoleListForProject(ctx context.Context, projectID uuid.UUID) ([]ProfileProjectRoleListForProjectRow, error) { + rows, err := q.db.Query(ctx, profileProjectRoleListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []ProfileProjectRoleListForProjectRow{} + for rows.Next() { + var i ProfileProjectRoleListForProjectRow + if err := rows.Scan( + &i.ID, + &i.ProfileID, + &i.Username, + &i.Email, + &i.RoleID, + &i.Role, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go new file mode 100644 index 00000000..fea94f1e --- /dev/null +++ b/api/internal/db/querier.go @@ -0,0 +1,330 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +type Querier interface { + AlertConfigCreate(ctx context.Context, arg AlertConfigCreateParams) (uuid.UUID, error) + AlertConfigDelete(ctx context.Context, id uuid.UUID) error + AlertConfigGet(ctx context.Context, id uuid.UUID) (VAlertConfig, error) + AlertConfigInstrumentCreateAssignment(ctx context.Context, arg AlertConfigInstrumentCreateAssignmentParams) error + AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertConfigListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlertConfig, error) + AlertConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlertConfig, error) + AlertConfigListForProjectAlertType(ctx context.Context, arg AlertConfigListForProjectAlertTypeParams) ([]VAlertConfig, error) + AlertConfigListUpdateLastCheckedAt(ctx context.Context) ([]VAlertConfig, error) + AlertConfigUpdate(ctx context.Context, arg AlertConfigUpdateParams) error + AlertConfigUpdateLastRemindedAt(ctx context.Context, arg AlertConfigUpdateLastRemindedAtParams) error + AlertCreate(ctx context.Context, alertConfigID uuid.UUID) error + AlertCreateBatch(ctx context.Context, alertConfigID []uuid.UUID) *AlertCreateBatchBatchResults + AlertEmailSubscriptionCreate(ctx context.Context, arg AlertEmailSubscriptionCreateParams) error + AlertEmailSubscriptionDelete(ctx context.Context, arg AlertEmailSubscriptionDeleteParams) error + AlertEmailSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertGet(ctx context.Context, arg AlertGetParams) (AlertGetRow, error) + AlertListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VAlert, error) + AlertListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertListForProfileRow, error) + AlertListForProject(ctx context.Context, projectID uuid.UUID) ([]VAlert, error) + 
AlertProfileSubscriptionCreate(ctx context.Context, arg AlertProfileSubscriptionCreateParams) error + AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx context.Context, arg AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams) error + AlertProfileSubscriptionDelete(ctx context.Context, arg AlertProfileSubscriptionDeleteParams) error + AlertProfileSubscritpionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error + AlertReadCreate(ctx context.Context, arg AlertReadCreateParams) error + AlertReadDelete(ctx context.Context, arg AlertReadDeleteParams) error + AlertSubscriptionGet(ctx context.Context, id uuid.UUID) (AlertProfileSubscription, error) + AlertSubscriptionGetForAlertConfigProfile(ctx context.Context, arg AlertSubscriptionGetForAlertConfigProfileParams) (AlertProfileSubscription, error) + AlertSubscriptionListForProfile(ctx context.Context, profileID uuid.UUID) ([]AlertProfileSubscription, error) + AlertSubscriptionUpdateForProfile(ctx context.Context, arg AlertSubscriptionUpdateForProfileParams) error + AwareParameterList(ctx context.Context) ([]AwareParameterListRow, error) + AwarePlatformCreate(ctx context.Context, arg AwarePlatformCreateParams) error + AwarePlatformCreateBatch(ctx context.Context, arg []AwarePlatformCreateBatchParams) *AwarePlatformCreateBatchBatchResults + AwarePlatformParameterListEnabled(ctx context.Context) ([]VAwarePlatformParameterEnabled, error) + CalculationCreate(ctx context.Context, arg CalculationCreateParams) error + CalculationUpdate(ctx context.Context, arg CalculationUpdateParams) error + CollectionGroupCreate(ctx context.Context, arg CollectionGroupCreateParams) (CollectionGroup, error) + CollectionGroupDelete(ctx context.Context, arg CollectionGroupDeleteParams) error + CollectionGroupDetailsGet(ctx context.Context, id uuid.UUID) (VCollectionGroupDetails, error) + CollectionGroupListForProject(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) + 
CollectionGroupTimeseriesCreate(ctx context.Context, arg CollectionGroupTimeseriesCreateParams) error + CollectionGroupTimeseriesDelete(ctx context.Context, arg CollectionGroupTimeseriesDeleteParams) error + CollectionGroupTimeseriesUpdateSortOrder(ctx context.Context, arg CollectionGroupTimeseriesUpdateSortOrderParams) error + CollectionGroupUpdate(ctx context.Context, arg CollectionGroupUpdateParams) (CollectionGroup, error) + DataloggerCreate(ctx context.Context, arg DataloggerCreateParams) (uuid.UUID, error) + DataloggerDelete(ctx context.Context, arg DataloggerDeleteParams) error + DataloggerErrorCreate(ctx context.Context, arg DataloggerErrorCreateParams) error + DataloggerErrorCreateBatch(ctx context.Context, arg []DataloggerErrorCreateBatchParams) *DataloggerErrorCreateBatchBatchResults + DataloggerErrorDelete(ctx context.Context, arg DataloggerErrorDeleteParams) error + DataloggerGet(ctx context.Context, id uuid.UUID) (VDatalogger, error) + DataloggerGetActive(ctx context.Context, arg DataloggerGetActiveParams) (bool, error) + DataloggerGetExists(ctx context.Context, id uuid.UUID) (bool, error) + DataloggerGetForModelSn(ctx context.Context, arg DataloggerGetForModelSnParams) (VDatalogger, error) + DataloggerGetModelName(ctx context.Context, id uuid.UUID) (*string, error) + DataloggerHashCreate(ctx context.Context, arg DataloggerHashCreateParams) error + DataloggerHashGetForModelSn(ctx context.Context, arg DataloggerHashGetForModelSnParams) (string, error) + DataloggerHashUpdate(ctx context.Context, arg DataloggerHashUpdateParams) error + DataloggerList(ctx context.Context) ([]VDatalogger, error) + DataloggerListForProject(ctx context.Context, projectID uuid.UUID) ([]VDatalogger, error) + DataloggerTableDelete(ctx context.Context, id uuid.UUID) error + DataloggerTableGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) + DataloggerTableGetOrCreate(ctx context.Context, arg DataloggerTableGetOrCreateParams) (uuid.UUID, error) + 
DataloggerTablePreviewCreate(ctx context.Context, arg DataloggerTablePreviewCreateParams) error + DataloggerTablePreviewGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerPreview, error) + DataloggerTablePreviewUpdate(ctx context.Context, arg DataloggerTablePreviewUpdateParams) error + DataloggerTableUpdateNameIfEmpty(ctx context.Context, arg DataloggerTableUpdateNameIfEmptyParams) error + DataloggerUpdate(ctx context.Context, arg DataloggerUpdateParams) error + DataloggerUpdateAuditInfo(ctx context.Context, arg DataloggerUpdateAuditInfoParams) error + DataloggerUpdateTableNameBlank(ctx context.Context, id uuid.UUID) error + DistrictList(ctx context.Context) ([]VDistrict, error) + DistrictRollupListEvaluationForProjectAlertConfig(ctx context.Context, arg DistrictRollupListEvaluationForProjectAlertConfigParams) ([]VDistrictRollup, error) + DistrictRollupListMeasurementForProjectAlertConfig(ctx context.Context, arg DistrictRollupListMeasurementForProjectAlertConfigParams) ([]VDistrictRollup, error) + DomainGroupList(ctx context.Context) ([]VDomainGroup, error) + DomainList(ctx context.Context) ([]VDomain, error) + EmailAutocompleteList(ctx context.Context, arg EmailAutocompleteListParams) ([]EmailAutocompleteListRow, error) + EmailDelete(ctx context.Context, id uuid.UUID) error + EmailGetOrCreate(ctx context.Context, email string) (uuid.UUID, error) + EquivalencyTableCreateOrUpdate(ctx context.Context, arg EquivalencyTableCreateOrUpdateParams) error + EquivalencyTableDelete(ctx context.Context, id uuid.UUID) error + EquivalencyTableDeleteForDataloggerTable(ctx context.Context, dataloggerTableID *uuid.UUID) error + EquivalencyTableGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error) + EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) + EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error + EvaluationCreate(ctx context.Context, arg 
EvaluationCreateParams) (uuid.UUID, error) + EvaluationDelete(ctx context.Context, id uuid.UUID) error + EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error) + EvaluationInstrumentCreate(ctx context.Context, arg EvaluationInstrumentCreateParams) error + EvaluationInstrumentCreateBatch(ctx context.Context, arg []EvaluationInstrumentCreateBatchParams) *EvaluationInstrumentCreateBatchBatchResults + EvaluationInstrumentDeleteForEvaluation(ctx context.Context, evaluationID *uuid.UUID) error + EvaluationListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]VEvaluation, error) + EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) + EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) + EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error + HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) + HeartbeatGetLatest(ctx context.Context) (time.Time, error) + HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error) + HomeGet(ctx context.Context) (HomeGetRow, error) + InclMeasurementListForInstrumentRange(ctx context.Context, arg InclMeasurementListForInstrumentRangeParams) ([]VInclMeasurement, error) + InclOptsCreate(ctx context.Context, arg InclOptsCreateParams) error + InclOptsCreateBatch(ctx context.Context, arg []InclOptsCreateBatchParams) *InclOptsCreateBatchBatchResults + InclOptsUpdate(ctx context.Context, arg InclOptsUpdateParams) error + InclOptsUpdateBatch(ctx context.Context, arg []InclOptsUpdateBatchParams) *InclOptsUpdateBatchBatchResults + InclSegmentCreate(ctx context.Context, arg InclSegmentCreateParams) error + InclSegmentCreateBatch(ctx context.Context, arg []InclSegmentCreateBatchParams) *InclSegmentCreateBatchBatchResults + InclSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error) + InclSegmentUpdate(ctx context.Context, 
arg InclSegmentUpdateParams) error + InclSegmentUpdateBatch(ctx context.Context, arg []InclSegmentUpdateBatchParams) *InclSegmentUpdateBatchBatchResults + InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error + InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults + InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error + InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) + InstrumentCreate(ctx context.Context, arg InstrumentCreateParams) (InstrumentCreateRow, error) + InstrumentCreateBatch(ctx context.Context, arg []InstrumentCreateBatchParams) *InstrumentCreateBatchBatchResults + InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error + InstrumentGet(ctx context.Context, id uuid.UUID) (VInstrument, error) + InstrumentGetCount(ctx context.Context) (int64, error) + InstrumentGroupCreate(ctx context.Context, arg InstrumentGroupCreateParams) (InstrumentGroupCreateRow, error) + InstrumentGroupCreateBatch(ctx context.Context, arg []InstrumentGroupCreateBatchParams) *InstrumentGroupCreateBatchBatchResults + InstrumentGroupDeleteFlag(ctx context.Context, id uuid.UUID) error + InstrumentGroupGet(ctx context.Context, id uuid.UUID) (VInstrumentGroup, error) + InstrumentGroupInstrumentCreate(ctx context.Context, arg InstrumentGroupInstrumentCreateParams) error + InstrumentGroupInstrumentDelete(ctx context.Context, arg InstrumentGroupInstrumentDeleteParams) error + InstrumentGroupList(ctx context.Context) ([]VInstrumentGroup, error) + InstrumentGroupListForProject(ctx context.Context, projectID *uuid.UUID) ([]VInstrumentGroup, error) + InstrumentGroupUpdate(ctx context.Context, arg InstrumentGroupUpdateParams) (InstrumentGroupUpdateRow, error) + InstrumentIDNameListByIDs(ctx context.Context, instrumentIds []uuid.UUID) ([]InstrumentIDNameListByIDsRow, error) + 
InstrumentListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VInstrument, error) + InstrumentListForProject(ctx context.Context, projectID uuid.UUID) ([]VInstrument, error) + InstrumentNoteCreate(ctx context.Context, arg InstrumentNoteCreateParams) (InstrumentNote, error) + InstrumentNoteCreateBatch(ctx context.Context, arg []InstrumentNoteCreateBatchParams) *InstrumentNoteCreateBatchBatchResults + InstrumentNoteDelete(ctx context.Context, id uuid.UUID) error + InstrumentNoteGet(ctx context.Context, id uuid.UUID) (InstrumentNote, error) + InstrumentNoteListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) + InstrumentNoteUpdate(ctx context.Context, arg InstrumentNoteUpdateParams) (InstrumentNote, error) + InstrumentStatusCreateOrUpdate(ctx context.Context, arg InstrumentStatusCreateOrUpdateParams) error + InstrumentStatusCreateOrUpdateBatch(ctx context.Context, arg []InstrumentStatusCreateOrUpdateBatchParams) *InstrumentStatusCreateOrUpdateBatchBatchResults + InstrumentStatusDelete(ctx context.Context, id uuid.UUID) error + InstrumentStatusGet(ctx context.Context, id uuid.UUID) (VInstrumentStatus, error) + InstrumentStatusListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInstrumentStatus, error) + InstrumentUpdate(ctx context.Context, arg InstrumentUpdateParams) error + InstrumentUpdateGeometry(ctx context.Context, arg InstrumentUpdateGeometryParams) (uuid.UUID, error) + IpiMeasurementListForInstrumentRange(ctx context.Context, arg IpiMeasurementListForInstrumentRangeParams) ([]VIpiMeasurement, error) + IpiOptsCreate(ctx context.Context, arg IpiOptsCreateParams) error + IpiOptsCreateBatch(ctx context.Context, arg []IpiOptsCreateBatchParams) *IpiOptsCreateBatchBatchResults + IpiOptsUpdate(ctx context.Context, arg IpiOptsUpdateParams) error + IpiOptsUpdateBatch(ctx context.Context, arg []IpiOptsUpdateBatchParams) *IpiOptsUpdateBatchBatchResults + IpiSegmentCreate(ctx context.Context, arg 
IpiSegmentCreateParams) error + IpiSegmentCreateBatch(ctx context.Context, arg []IpiSegmentCreateBatchParams) *IpiSegmentCreateBatchBatchResults + IpiSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error) + IpiSegmentUpdate(ctx context.Context, arg IpiSegmentUpdateParams) error + IpiSegmentUpdateBatch(ctx context.Context, arg []IpiSegmentUpdateBatchParams) *IpiSegmentUpdateBatchBatchResults + PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error) + PlotBullseyeConfigCreate(ctx context.Context, arg PlotBullseyeConfigCreateParams) error + PlotBullseyeConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error + PlotBullseyeConfigUpdate(ctx context.Context, arg PlotBullseyeConfigUpdateParams) error + PlotConfigCreate(ctx context.Context, arg PlotConfigCreateParams) (uuid.UUID, error) + PlotConfigCustomShapeCreate(ctx context.Context, arg PlotConfigCustomShapeCreateParams) error + PlotConfigCustomShapeCreateBatch(ctx context.Context, arg []PlotConfigCustomShapeCreateBatchParams) *PlotConfigCustomShapeCreateBatchBatchResults + PlotConfigCustomShapeDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error + PlotConfigCustomShapeUpdate(ctx context.Context, arg PlotConfigCustomShapeUpdateParams) error + PlotConfigDelete(ctx context.Context, arg PlotConfigDeleteParams) error + PlotConfigGet(ctx context.Context, id uuid.UUID) (VPlotConfiguration, error) + PlotConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VPlotConfiguration, error) + PlotConfigMeasurementListBullseye(ctx context.Context, arg PlotConfigMeasurementListBullseyeParams) ([]PlotConfigMeasurementListBullseyeRow, error) + PlotConfigMeasurementListContour(ctx context.Context, arg PlotConfigMeasurementListContourParams) ([]PlotConfigMeasurementListContourRow, error) + PlotConfigScatterLineLayoutCreate(ctx context.Context, arg PlotConfigScatterLineLayoutCreateParams) error + PlotConfigScatterLineLayoutUpdate(ctx 
context.Context, arg PlotConfigScatterLineLayoutUpdateParams) error + PlotConfigSettingsCreate(ctx context.Context, arg PlotConfigSettingsCreateParams) error + PlotConfigSettingsDelete(ctx context.Context, id uuid.UUID) error + PlotConfigTimeseriesTraceCreate(ctx context.Context, arg PlotConfigTimeseriesTraceCreateParams) error + PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) error + PlotConfigTimeseriesTraceUpdate(ctx context.Context, arg PlotConfigTimeseriesTraceUpdateParams) error + PlotConfigTimeseriesTracesCreateBatch(ctx context.Context, arg []PlotConfigTimeseriesTracesCreateBatchParams) *PlotConfigTimeseriesTracesCreateBatchBatchResults + PlotConfigUpdate(ctx context.Context, arg PlotConfigUpdateParams) error + PlotContourConfigCreate(ctx context.Context, arg PlotContourConfigCreateParams) error + PlotContourConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error + PlotContourConfigListTimeRange(ctx context.Context, arg PlotContourConfigListTimeRangeParams) ([]time.Time, error) + PlotContourConfigTimeseriesCreate(ctx context.Context, arg PlotContourConfigTimeseriesCreateParams) error + PlotContourConfigTimeseriesCreateBatch(ctx context.Context, arg []PlotContourConfigTimeseriesCreateBatchParams) *PlotContourConfigTimeseriesCreateBatchBatchResults + PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx context.Context, plotContourConfigID uuid.UUID) error + PlotContourConfigUpdate(ctx context.Context, arg PlotContourConfigUpdateParams) error + PlotProfileConfigCreate(ctx context.Context, arg PlotProfileConfigCreateParams) error + PlotProfileConfigUpdate(ctx context.Context, arg PlotProfileConfigUpdateParams) error + ProfileCreate(ctx context.Context, arg ProfileCreateParams) (ProfileCreateRow, error) + ProfileGetForEDIPI(ctx context.Context, edipi int64) (VProfile, error) + ProfileGetForEmail(ctx context.Context, email string) (VProfile, error) + ProfileGetForToken(ctx context.Context, tokenID 
string) (ProfileGetForTokenRow, error) + ProfileGetForUsername(ctx context.Context, username string) (VProfile, error) + ProfileProjectRoleCreate(ctx context.Context, arg ProfileProjectRoleCreateParams) (uuid.UUID, error) + ProfileProjectRoleDelete(ctx context.Context, arg ProfileProjectRoleDeleteParams) error + ProfileProjectRoleGet(ctx context.Context, id uuid.UUID) (ProfileProjectRoleGetRow, error) + ProfileProjectRoleGetIsAdmin(ctx context.Context, arg ProfileProjectRoleGetIsAdminParams) (bool, error) + ProfileProjectRoleGetIsMemberOrAdmin(ctx context.Context, arg ProfileProjectRoleGetIsMemberOrAdminParams) (bool, error) + ProfileProjectRoleListForProject(ctx context.Context, projectID uuid.UUID) ([]ProfileProjectRoleListForProjectRow, error) + ProfileTokenCreate(ctx context.Context, arg ProfileTokenCreateParams) (ProfileToken, error) + ProfileTokenDelete(ctx context.Context, arg ProfileTokenDeleteParams) error + ProfileTokenGet(ctx context.Context, tokenID string) (ProfileToken, error) + ProfileTokenList(ctx context.Context, profileID uuid.UUID) ([]ProfileTokenListRow, error) + ProfileUpdateForEDIPI(ctx context.Context, arg ProfileUpdateForEDIPIParams) error + ProfileUpdateForEmail(ctx context.Context, arg ProfileUpdateForEmailParams) error + ProfileUpdateForUsername(ctx context.Context, arg ProfileUpdateForUsernameParams) error + ProjectCreateBatch(ctx context.Context, arg []ProjectCreateBatchParams) *ProjectCreateBatchBatchResults + ProjectDeleteFlag(ctx context.Context, id uuid.UUID) error + ProjectGet(ctx context.Context, id uuid.UUID) (VProject, error) + ProjectGetCount(ctx context.Context) (int64, error) + ProjectInstrumentCreate(ctx context.Context, arg ProjectInstrumentCreateParams) error + ProjectInstrumentCreateBatch(ctx context.Context, arg []ProjectInstrumentCreateBatchParams) *ProjectInstrumentCreateBatchBatchResults + ProjectInstrumentDelete(ctx context.Context, arg ProjectInstrumentDeleteParams) error + ProjectInstrumentDeleteBatch(ctx 
context.Context, arg []ProjectInstrumentDeleteBatchParams) *ProjectInstrumentDeleteBatchBatchResults + ProjectInstrumentListCountByInstrument(ctx context.Context, instrumentIds []uuid.UUID) ([]ProjectInstrumentListCountByInstrumentRow, error) + ProjectInstrumentListForInstrumentNameProjects(ctx context.Context, arg ProjectInstrumentListForInstrumentNameProjectsParams) ([]string, error) + ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentProjectsProfileAdminParams) ([]string, error) + ProjectInstrumentListForInstrumentsProfileAdmin(ctx context.Context, arg ProjectInstrumentListForInstrumentsProfileAdminParams) ([]ProjectInstrumentListForInstrumentsProfileAdminRow, error) + ProjectInstrumentListForProjectInstrumentNames(ctx context.Context, arg ProjectInstrumentListForProjectInstrumentNamesParams) ([]string, error) + ProjectInstrumentListProjectIDForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) + ProjectList(ctx context.Context) ([]VProject, error) + ProjectListForFederalID(ctx context.Context, federalID *string) ([]VProject, error) + ProjectListForNameSearch(ctx context.Context, arg ProjectListForNameSearchParams) ([]VProject, error) + ProjectListForProfileAdmin(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) + ProjectListForProfileRole(ctx context.Context, arg ProjectListForProfileRoleParams) ([]VProject, error) + ProjectUpdate(ctx context.Context, arg ProjectUpdateParams) (uuid.UUID, error) + ProjectUpdateImage(ctx context.Context, arg ProjectUpdateImageParams) error + ReportConfigCreate(ctx context.Context, arg ReportConfigCreateParams) (uuid.UUID, error) + ReportConfigDelete(ctx context.Context, id uuid.UUID) error + ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportConfig, error) + ReportConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) + ReportConfigListForReportConfigWithPlotConfig(ctx context.Context, 
reportConfigID uuid.UUID) ([]VPlotConfiguration, error) + ReportConfigPlotConfigCreate(ctx context.Context, arg ReportConfigPlotConfigCreateParams) error + ReportConfigPlotConfigCreateBatch(ctx context.Context, arg []ReportConfigPlotConfigCreateBatchParams) *ReportConfigPlotConfigCreateBatchBatchResults + ReportConfigPlotConfigDelete(ctx context.Context, arg ReportConfigPlotConfigDeleteParams) error + ReportConfigPlotConfigDeleteBatch(ctx context.Context, arg []ReportConfigPlotConfigDeleteBatchParams) *ReportConfigPlotConfigDeleteBatchBatchResults + ReportConfigPlotConfigDeleteForReportConfig(ctx context.Context, reportConfigID uuid.UUID) error + ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdateParams) error + ReportDownloadJobCreate(ctx context.Context, arg ReportDownloadJobCreateParams) (ReportDownloadJob, error) + ReportDownloadJobGet(ctx context.Context, arg ReportDownloadJobGetParams) (ReportDownloadJob, error) + ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloadJobUpdateParams) error + SaaMeasurementListForInstrumentRange(ctx context.Context, arg SaaMeasurementListForInstrumentRangeParams) ([]VSaaMeasurement, error) + SaaOptsCreate(ctx context.Context, arg SaaOptsCreateParams) error + SaaOptsCreateBatch(ctx context.Context, arg []SaaOptsCreateBatchParams) *SaaOptsCreateBatchBatchResults + SaaOptsUpdate(ctx context.Context, arg SaaOptsUpdateParams) error + SaaOptsUpdateBatch(ctx context.Context, arg []SaaOptsUpdateBatchParams) *SaaOptsUpdateBatchBatchResults + SaaSegmentCreate(ctx context.Context, arg SaaSegmentCreateParams) error + SaaSegmentCreateBatch(ctx context.Context, arg []SaaSegmentCreateBatchParams) *SaaSegmentCreateBatchBatchResults + SaaSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VSaaSegment, error) + SaaSegmentUpdate(ctx context.Context, arg SaaSegmentUpdateParams) error + SaaSegmentUpdateBatch(ctx context.Context, arg []SaaSegmentUpdateBatchParams) *SaaSegmentUpdateBatchBatchResults + 
SubmittalCreateNextEvaluation(ctx context.Context, id uuid.UUID) error + SubmittalCreateNextFromExistingAlertConfigDate(ctx context.Context, id uuid.UUID) error + SubmittalCreateNextFromNewAlertConfigDate(ctx context.Context, arg SubmittalCreateNextFromNewAlertConfigDateParams) error + SubmittalListForAlertConfig(ctx context.Context, arg SubmittalListForAlertConfigParams) ([]VSubmittal, error) + SubmittalListForInstrument(ctx context.Context, arg SubmittalListForInstrumentParams) ([]VSubmittal, error) + SubmittalListForProject(ctx context.Context, arg SubmittalListForProjectParams) ([]VSubmittal, error) + SubmittalListIncompleteEvaluation(ctx context.Context) ([]VAlertCheckEvaluationSubmittal, error) + SubmittalListIncompleteMeasurement(ctx context.Context) ([]VAlertCheckMeasurementSubmittal, error) + SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmittal, error) + SubmittalUpdate(ctx context.Context, arg SubmittalUpdateParams) error + SubmittalUpdateCompleteEvaluation(ctx context.Context, id uuid.UUID) (Submittal, error) + SubmittalUpdateCompletionDateOrWarningSent(ctx context.Context, arg SubmittalUpdateCompletionDateOrWarningSentParams) error + SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error) + SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error + SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error + // the below queried are needed becuase the slug is currently used as the variable name, it would + // be better if we used a generated column for this on the timeseries table, maybe converted to snake_case + TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) + TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error + TimeseriesComputedGet(ctx context.Context, id uuid.UUID) (TimeseriesComputedGetRow, error) + TimeseriesComputedListForInstrument(ctx context.Context, instrumentID 
*uuid.UUID) ([]TimeseriesComputedListForInstrumentRow, error) + TimeseriesComputedUpdate(ctx context.Context, arg TimeseriesComputedUpdateParams) error + TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) + TimeseriesCreateBatch(ctx context.Context, arg []TimeseriesCreateBatchParams) *TimeseriesCreateBatchBatchResults + TimeseriesCwmsCreate(ctx context.Context, arg TimeseriesCwmsCreateParams) error + TimeseriesCwmsCreateBatch(ctx context.Context, arg []TimeseriesCwmsCreateBatchParams) *TimeseriesCwmsCreateBatchBatchResults + TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwms, error) + TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwms, error) + TimeseriesCwmsUpdate(ctx context.Context, arg TimeseriesCwmsUpdateParams) error + TimeseriesDelete(ctx context.Context, id uuid.UUID) error + TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error) + TimeseriesGetAllBelongToProject(ctx context.Context, arg TimeseriesGetAllBelongToProjectParams) (bool, error) + TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) (bool, error) + TimeseriesListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) + TimeseriesListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) + TimeseriesListForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) + TimeseriesListForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) + TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error + TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults + TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg TimeseriesMeasurementCreateOrUpdateParams) error + TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg 
[]TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults + TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults + TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error + TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults + TimeseriesMeasurementDeleteRange(ctx context.Context, arg TimeseriesMeasurementDeleteRangeParams) error + TimeseriesMeasurementDeleteRangeBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteRangeBatchParams) *TimeseriesMeasurementDeleteRangeBatchBatchResults + TimeseriesMeasurementGetMostRecent(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesMeasurement, error) + TimeseriesMeasurementListForRange(ctx context.Context, arg TimeseriesMeasurementListForRangeParams) ([]VTimeseriesMeasurement, error) + TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error + TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults + TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error + TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults + TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults + TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error + TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults + TimeseriesNoteDeleteRange(ctx context.Context, arg TimeseriesNoteDeleteRangeParams) error + 
TimeseriesNoteDeleteRangeBatch(ctx context.Context, arg []TimeseriesNoteDeleteRangeBatchParams) *TimeseriesNoteDeleteRangeBatchBatchResults + TimeseriesUpdate(ctx context.Context, arg TimeseriesUpdateParams) error + UnitsList(ctx context.Context) ([]VUnit, error) + UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) + UploaderConfigDelete(ctx context.Context, id uuid.UUID) error + UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploaderConfig, error) + UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) + UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults + UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error + UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) + UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error +} + +var _ Querier = (*Queries)(nil) diff --git a/api/internal/db/report_config.sql_gen.go b/api/internal/db/report_config.sql_gen.go new file mode 100644 index 00000000..e5d02ed3 --- /dev/null +++ b/api/internal/db/report_config.sql_gen.go @@ -0,0 +1,324 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: report_config.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const reportConfigCreate = `-- name: ReportConfigCreate :one +insert into report_config ( + name, slug, project_id, created_by, description, date_range, date_range_enabled, + show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled +) +values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id +` + +type ReportConfigCreateParams struct { + Name string `json:"name"` + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + Description string `json:"description"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` +} + +func (q *Queries) ReportConfigCreate(ctx context.Context, arg ReportConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, reportConfigCreate, + arg.Name, + arg.ProjectID, + arg.CreatedBy, + arg.Description, + arg.DateRange, + arg.DateRangeEnabled, + arg.ShowMasked, + arg.ShowMaskedEnabled, + arg.ShowNonvalidated, + arg.ShowNonvalidatedEnabled, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const reportConfigDelete = `-- name: ReportConfigDelete :exec +delete from report_config where id=$1 +` + +func (q *Queries) ReportConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, reportConfigDelete, id) + return err +} + +const reportConfigGet = `-- name: ReportConfigGet :one +select id, slug, name, description, project_id, project_name, district_name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, plot_configs, global_overrides from v_report_config where id = $1 +` + +func (q 
*Queries) ReportConfigGet(ctx context.Context, id uuid.UUID) (VReportConfig, error) { + row := q.db.QueryRow(ctx, reportConfigGet, id) + var i VReportConfig + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.ProjectID, + &i.ProjectName, + &i.DistrictName, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.PlotConfigs, + &i.GlobalOverrides, + ) + return i, err +} + +const reportConfigListForProject = `-- name: ReportConfigListForProject :many +select id, slug, name, description, project_id, project_name, district_name, created_by, created_by_username, created_at, updated_by, updated_by_username, updated_at, plot_configs, global_overrides from v_report_config where project_id = $1 +` + +func (q *Queries) ReportConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VReportConfig, error) { + rows, err := q.db.Query(ctx, reportConfigListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VReportConfig{} + for rows.Next() { + var i VReportConfig + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Description, + &i.ProjectID, + &i.ProjectName, + &i.DistrictName, + &i.CreatedBy, + &i.CreatedByUsername, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.UpdatedAt, + &i.PlotConfigs, + &i.GlobalOverrides, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const reportConfigListForReportConfigWithPlotConfig = `-- name: ReportConfigListForReportConfigWithPlotConfig :many +select id, slug, name, project_id, created_by, created_at, updated_by, updated_at, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold, report_configs, plot_type, display from v_plot_configuration where id = any( + select plot_config_id from report_config_plot_config where report_config_id = $1 +) +` + +func (q *Queries) 
ReportConfigListForReportConfigWithPlotConfig(ctx context.Context, reportConfigID uuid.UUID) ([]VPlotConfiguration, error) { + rows, err := q.db.Query(ctx, reportConfigListForReportConfigWithPlotConfig, reportConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VPlotConfiguration{} + for rows.Next() { + var i VPlotConfiguration + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.ProjectID, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedBy, + &i.UpdatedAt, + &i.ShowMasked, + &i.ShowNonvalidated, + &i.ShowComments, + &i.AutoRange, + &i.DateRange, + &i.Threshold, + &i.ReportConfigs, + &i.PlotType, + &i.Display, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const reportConfigPlotConfigCreate = `-- name: ReportConfigPlotConfigCreate :exec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2) +` + +type ReportConfigPlotConfigCreateParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigCreate(ctx context.Context, arg ReportConfigPlotConfigCreateParams) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigCreate, arg.ReportConfigID, arg.PlotConfigID) + return err +} + +const reportConfigPlotConfigDelete = `-- name: ReportConfigPlotConfigDelete :exec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2 +` + +type ReportConfigPlotConfigDeleteParams struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + PlotConfigID uuid.UUID `json:"plot_config_id"` +} + +func (q *Queries) ReportConfigPlotConfigDelete(ctx context.Context, arg ReportConfigPlotConfigDeleteParams) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigDelete, arg.ReportConfigID, arg.PlotConfigID) + return err +} + +const reportConfigPlotConfigDeleteForReportConfig = `-- name: 
ReportConfigPlotConfigDeleteForReportConfig :exec +delete from report_config_plot_config where report_config_id=$1 +` + +func (q *Queries) ReportConfigPlotConfigDeleteForReportConfig(ctx context.Context, reportConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, reportConfigPlotConfigDeleteForReportConfig, reportConfigID) + return err +} + +const reportConfigUpdate = `-- name: ReportConfigUpdate :exec +update report_config set name=$2, +updated_by=$3, updated_at=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1 +` + +type ReportConfigUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + Description string `json:"description"` + DateRange *string `json:"date_range"` + DateRangeEnabled *bool `json:"date_range_enabled"` + ShowMasked *bool `json:"show_masked"` + ShowMaskedEnabled *bool `json:"show_masked_enabled"` + ShowNonvalidated *bool `json:"show_nonvalidated"` + ShowNonvalidatedEnabled *bool `json:"show_nonvalidated_enabled"` +} + +func (q *Queries) ReportConfigUpdate(ctx context.Context, arg ReportConfigUpdateParams) error { + _, err := q.db.Exec(ctx, reportConfigUpdate, + arg.ID, + arg.Name, + arg.UpdatedBy, + arg.UpdatedAt, + arg.Description, + arg.DateRange, + arg.DateRangeEnabled, + arg.ShowMasked, + arg.ShowMaskedEnabled, + arg.ShowNonvalidated, + arg.ShowNonvalidatedEnabled, + ) + return err +} + +const reportDownloadJobCreate = `-- name: ReportDownloadJobCreate :one +insert into report_download_job (report_config_id, created_by) values ($1, $2) returning id, report_config_id, created_by, created_at, status, file_key, file_expiry, progress, progress_updated_at +` + +type ReportDownloadJobCreateParams struct { + ReportConfigID *uuid.UUID `json:"report_config_id"` + CreatedBy uuid.UUID `json:"created_by"` +} + +func (q *Queries) 
ReportDownloadJobCreate(ctx context.Context, arg ReportDownloadJobCreateParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, reportDownloadJobCreate, arg.ReportConfigID, arg.CreatedBy) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.CreatedBy, + &i.CreatedAt, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdatedAt, + ) + return i, err +} + +const reportDownloadJobGet = `-- name: ReportDownloadJobGet :one +select id, report_config_id, created_by, created_at, status, file_key, file_expiry, progress, progress_updated_at from report_download_job where id=$1 and created_by=$2 +` + +type ReportDownloadJobGetParams struct { + ID uuid.UUID `json:"id"` + CreatedBy uuid.UUID `json:"created_by"` +} + +func (q *Queries) ReportDownloadJobGet(ctx context.Context, arg ReportDownloadJobGetParams) (ReportDownloadJob, error) { + row := q.db.QueryRow(ctx, reportDownloadJobGet, arg.ID, arg.CreatedBy) + var i ReportDownloadJob + err := row.Scan( + &i.ID, + &i.ReportConfigID, + &i.CreatedBy, + &i.CreatedAt, + &i.Status, + &i.FileKey, + &i.FileExpiry, + &i.Progress, + &i.ProgressUpdatedAt, + ) + return i, err +} + +const reportDownloadJobUpdate = `-- name: ReportDownloadJobUpdate :exec +update report_download_job set status=$2, progress=$3, progress_updated_at=$4, file_key=$5, file_expiry=$6 where id=$1 +` + +type ReportDownloadJobUpdateParams struct { + ID uuid.UUID `json:"id"` + Status JobStatus `json:"status"` + Progress int32 `json:"progress"` + ProgressUpdatedAt time.Time `json:"progress_updated_at"` + FileKey *string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` +} + +func (q *Queries) ReportDownloadJobUpdate(ctx context.Context, arg ReportDownloadJobUpdateParams) error { + _, err := q.db.Exec(ctx, reportDownloadJobUpdate, + arg.ID, + arg.Status, + arg.Progress, + arg.ProgressUpdatedAt, + arg.FileKey, + arg.FileExpiry, + ) + return err +} diff --git a/api/internal/db/submittal.sql_gen.go 
b/api/internal/db/submittal.sql_gen.go new file mode 100644 index 00000000..f29e74ec --- /dev/null +++ b/api/internal/db/submittal.sql_gen.go @@ -0,0 +1,250 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: submittal.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const submittalListForAlertConfig = `-- name: SubmittalListForAlertConfig :many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent +from v_submittal +where alert_config_id = $1 +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc +` + +type SubmittalListForAlertConfigParams struct { + AlertConfigID uuid.UUID `json:"alert_config_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` +} + +func (q *Queries) SubmittalListForAlertConfig(ctx context.Context, arg SubmittalListForAlertConfigParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListForAlertConfig, arg.AlertConfigID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const submittalListForInstrument = `-- name: SubmittalListForInstrument :many +select sub.id, sub.alert_config_id, sub.alert_config_name, sub.alert_type_id, sub.alert_type_name, sub.project_id, sub.submittal_status_id, sub.submittal_status_name, sub.completed_at, 
sub.created_at, sub.due_at, sub.marked_as_missing, sub.warning_sent +from v_submittal sub +inner join alert_config_instrument aci on aci.alert_config_id = sub.alert_config_id +where aci.instrument_id = $1 +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by sub.due_at desc +` + +type SubmittalListForInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` +} + +func (q *Queries) SubmittalListForInstrument(ctx context.Context, arg SubmittalListForInstrumentParams) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListForInstrument, arg.InstrumentID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const submittalListForProject = `-- name: SubmittalListForProject :many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent +from v_submittal +where project_id = $1 +and ($2::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc, alert_type_name asc +` + +type SubmittalListForProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + ShowIncompleteMissing bool `json:"show_incomplete_missing"` +} + +func (q *Queries) SubmittalListForProject(ctx context.Context, arg SubmittalListForProjectParams) ([]VSubmittal, error) { + rows, err := 
q.db.Query(ctx, submittalListForProject, arg.ProjectID, arg.ShowIncompleteMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const submittalListUnverifiedMissing = `-- name: SubmittalListUnverifiedMissing :many +select id, alert_config_id, alert_config_name, alert_type_id, alert_type_name, project_id, submittal_status_id, submittal_status_name, completed_at, created_at, due_at, marked_as_missing, warning_sent +from v_submittal +where completed_at is null +and not marked_as_missing +order by due_at desc +` + +func (q *Queries) SubmittalListUnverifiedMissing(ctx context.Context) ([]VSubmittal, error) { + rows, err := q.db.Query(ctx, submittalListUnverifiedMissing) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSubmittal{} + for rows.Next() { + var i VSubmittal + if err := rows.Scan( + &i.ID, + &i.AlertConfigID, + &i.AlertConfigName, + &i.AlertTypeID, + &i.AlertTypeName, + &i.ProjectID, + &i.SubmittalStatusID, + &i.SubmittalStatusName, + &i.CompletedAt, + &i.CreatedAt, + &i.DueAt, + &i.MarkedAsMissing, + &i.WarningSent, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const submittalUpdate = `-- name: SubmittalUpdate :exec +update submittal set + submittal_status_id = $2, + completed_at = $3, + warning_sent = $4 +where id = $1 +` + +type SubmittalUpdateParams struct { + ID uuid.UUID `json:"id"` + SubmittalStatusID *uuid.UUID 
`json:"submittal_status_id"` + CompletedAt *time.Time `json:"completed_at"` + WarningSent bool `json:"warning_sent"` +} + +func (q *Queries) SubmittalUpdate(ctx context.Context, arg SubmittalUpdateParams) error { + _, err := q.db.Exec(ctx, submittalUpdate, + arg.ID, + arg.SubmittalStatusID, + arg.CompletedAt, + arg.WarningSent, + ) + return err +} + +const submittalUpdateVerifyMissing = `-- name: SubmittalUpdateVerifyMissing :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where id = $1 +and completed_at is null +and now() > due_at +` + +func (q *Queries) SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalUpdateVerifyMissing, id) + return err +} + +const submittalUpdateVerifyMissingForAlertConfig = `-- name: SubmittalUpdateVerifyMissingForAlertConfig :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where alert_config_id = $1 +and completed_at is null +and now() > due_at +` + +func (q *Queries) SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error { + _, err := q.db.Exec(ctx, submittalUpdateVerifyMissingForAlertConfig, alertConfigID) + return err +} diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go new file mode 100644 index 00000000..a876f1bb --- /dev/null +++ b/api/internal/db/timeseries.sql_gen.go @@ -0,0 +1,310 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: timeseries.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const timeseriesCreate = `-- name: TimeseriesCreate :one +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type +` + +type TimeseriesCreateParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` +} + +type TimeseriesCreateRow struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + Slug string `json:"slug"` + Name string `json:"name"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Type TimeseriesType `json:"type"` +} + +func (q *Queries) TimeseriesCreate(ctx context.Context, arg TimeseriesCreateParams) (TimeseriesCreateRow, error) { + row := q.db.QueryRow(ctx, timeseriesCreate, + arg.InstrumentID, + arg.Name, + arg.ParameterID, + arg.UnitID, + arg.Type, + ) + var i TimeseriesCreateRow + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.Slug, + &i.Name, + &i.ParameterID, + &i.UnitID, + &i.Type, + ) + return i, err +} + +const timeseriesDelete = `-- name: TimeseriesDelete :exec +delete from timeseries where id = $1 +` + +func (q *Queries) TimeseriesDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, timeseriesDelete, id) + return err +} + +const timeseriesGet = `-- name: TimeseriesGet :one +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries where id=$1 +` + +func (q *Queries) TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error) { + row := q.db.QueryRow(ctx, timeseriesGet, id) + var i VTimeseries + err := row.Scan( + &i.ID, + 
&i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ) + return i, err +} + +const timeseriesGetAllBelongToProject = `-- name: TimeseriesGetAllBelongToProject :one +select not exists ( + select true + from timeseries ts + where not ts.instrument_id = any ( + select p.instrument_id + from project_instrument p + where p.project_id = $1 + ) + and ts.id = any($2::uuid[]) +) +` + +type TimeseriesGetAllBelongToProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + TimeseriesIds []uuid.UUID `json:"timeseries_ids"` +} + +func (q *Queries) TimeseriesGetAllBelongToProject(ctx context.Context, arg TimeseriesGetAllBelongToProjectParams) (bool, error) { + row := q.db.QueryRow(ctx, timeseriesGetAllBelongToProject, arg.ProjectID, arg.TimeseriesIds) + var not_exists bool + err := row.Scan(¬_exists) + return not_exists, err +} + +const timeseriesGetExistsStored = `-- name: TimeseriesGetExistsStored :one +select exists (select id from v_timeseries_stored where id = $1) +` + +func (q *Queries) TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, timeseriesGetExistsStored, id) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const timeseriesListForInstrument = `-- name: TimeseriesListForInstrument :many +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries +where instrument_id = $1 +` + +func (q *Queries) TimeseriesListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + 
&i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesListForInstrumentGroup = `-- name: TimeseriesListForInstrumentGroup :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id +where gi.instrument_group_id = $1 +` + +func (q *Queries) TimeseriesListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForInstrumentGroup, instrumentGroupID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesListForPlotConfig = `-- name: TimeseriesListForPlotConfig :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id +where pct.plot_configuration_id = $1 +` + +func (q *Queries) TimeseriesListForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForPlotConfig, plotConfigurationID) + if 
err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesListForProject = `-- name: TimeseriesListForProject :many +select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t +inner join project_instrument p on p.instrument_id = t.instrument_id +where p.project_id = $1 +` + +func (q *Queries) TimeseriesListForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error) { + rows, err := q.db.Query(ctx, timeseriesListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesUpdate = `-- name: TimeseriesUpdate :exec +update timeseries set name=$2, instrument_id=$3, parameter_id=$4, unit_id=$5 +where id = $1 +` + +type TimeseriesUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` +} + +func (q *Queries) TimeseriesUpdate(ctx context.Context, arg TimeseriesUpdateParams) error { 
+ _, err := q.db.Exec(ctx, timeseriesUpdate, + arg.ID, + arg.Name, + arg.InstrumentID, + arg.ParameterID, + arg.UnitID, + ) + return err +} diff --git a/api/internal/db/timeseries_calculated.sql_gen.go b/api/internal/db/timeseries_calculated.sql_gen.go new file mode 100644 index 00000000..a4ed115d --- /dev/null +++ b/api/internal/db/timeseries_calculated.sql_gen.go @@ -0,0 +1,194 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.27.0 +// source: timeseries_calculated.sql + +package db + +import ( + "context" + + "github.com/google/uuid" +) + +const calculationCreate = `-- name: CalculationCreate :exec +insert into calculation (timeseries_id, contents) values ($1,$2) +` + +type CalculationCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +func (q *Queries) CalculationCreate(ctx context.Context, arg CalculationCreateParams) error { + _, err := q.db.Exec(ctx, calculationCreate, arg.TimeseriesID, arg.Contents) + return err +} + +const calculationUpdate = `-- name: CalculationUpdate :exec +update calculation set contents=$2 where timeseries_id=$1 +` + +type CalculationUpdateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + Contents *string `json:"contents"` +} + +func (q *Queries) CalculationUpdate(ctx context.Context, arg CalculationUpdateParams) error { + _, err := q.db.Exec(ctx, calculationUpdate, arg.TimeseriesID, arg.Contents) + return err +} + +const timeseriesComputedCreate = `-- name: TimeseriesComputedCreate :one + +insert into timeseries (instrument_id, parameter_id, unit_id, slug, name, type) +values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +returning id +` + +type TimeseriesComputedCreateParams struct { + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Name string `json:"name"` +} + +// the below queried are needed becuase the slug is currently used as the variable 
name, it would +// be better if we used a generated column for this on the timeseries table, maybe converted to snake_case +func (q *Queries) TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, timeseriesComputedCreate, + arg.InstrumentID, + arg.ParameterID, + arg.UnitID, + arg.Name, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const timeseriesComputedDelete = `-- name: TimeseriesComputedDelete :exec +delete from timeseries where id = $1 and id = any(select timeseries_id from calculation) +` + +func (q *Queries) TimeseriesComputedDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, timeseriesComputedDelete, id) + return err +} + +const timeseriesComputedGet = `-- name: TimeseriesComputedGet :one +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where id = $1 +` + +type TimeseriesComputedGetRow struct { + ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + FormulaName string `json:"formula_name"` + Formula string `json:"formula"` +} + +func (q *Queries) TimeseriesComputedGet(ctx context.Context, id uuid.UUID) (TimeseriesComputedGetRow, error) { + row := q.db.QueryRow(ctx, timeseriesComputedGet, id) + var i TimeseriesComputedGetRow + err := row.Scan( + &i.ID, + &i.InstrumentID, + &i.ParameterID, + &i.UnitID, + &i.Slug, + &i.FormulaName, + &i.Formula, + ) + return i, err +} + +const timeseriesComputedListForInstrument = `-- name: TimeseriesComputedListForInstrument :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where instrument_id = $1 +` + +type TimeseriesComputedListForInstrumentRow struct { + 
ID uuid.UUID `json:"id"` + InstrumentID *uuid.UUID `json:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + FormulaName string `json:"formula_name"` + Formula string `json:"formula"` +} + +func (q *Queries) TimeseriesComputedListForInstrument(ctx context.Context, instrumentID *uuid.UUID) ([]TimeseriesComputedListForInstrumentRow, error) { + rows, err := q.db.Query(ctx, timeseriesComputedListForInstrument, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []TimeseriesComputedListForInstrumentRow{} + for rows.Next() { + var i TimeseriesComputedListForInstrumentRow + if err := rows.Scan( + &i.ID, + &i.InstrumentID, + &i.ParameterID, + &i.UnitID, + &i.Slug, + &i.FormulaName, + &i.Formula, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesComputedUpdate = `-- name: TimeseriesComputedUpdate :exec +update timeseries set + parameter_id=$2, + unit_id=$3, + slug=$4, + name=$5 +where id = $1 +` + +type TimeseriesComputedUpdateParams struct { + ID uuid.UUID `json:"id"` + ParameterID uuid.UUID `json:"parameter_id"` + UnitID uuid.UUID `json:"unit_id"` + Slug string `json:"slug"` + Name string `json:"name"` +} + +func (q *Queries) TimeseriesComputedUpdate(ctx context.Context, arg TimeseriesComputedUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesComputedUpdate, + arg.ID, + arg.ParameterID, + arg.UnitID, + arg.Slug, + arg.Name, + ) + return err +} diff --git a/api/internal/db/timeseries_cwms.sql_gen.go b/api/internal/db/timeseries_cwms.sql_gen.go new file mode 100644 index 00000000..1d7d9628 --- /dev/null +++ b/api/internal/db/timeseries_cwms.sql_gen.go @@ -0,0 +1,138 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: timeseries_cwms.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const timeseriesCwmsCreate = `-- name: TimeseriesCwmsCreate :exec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5) +` + +type TimeseriesCwmsCreateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` +} + +func (q *Queries) TimeseriesCwmsCreate(ctx context.Context, arg TimeseriesCwmsCreateParams) error { + _, err := q.db.Exec(ctx, timeseriesCwmsCreate, + arg.TimeseriesID, + arg.CwmsTimeseriesID, + arg.CwmsOfficeID, + arg.CwmsExtentEarliestTime, + arg.CwmsExtentLatestTime, + ) + return err +} + +const timeseriesCwmsGet = `-- name: TimeseriesCwmsGet :one +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms +where id = $1 +` + +func (q *Queries) TimeseriesCwmsGet(ctx context.Context, id uuid.UUID) (VTimeseriesCwms, error) { + row := q.db.QueryRow(ctx, timeseriesCwmsGet, id) + var i VTimeseriesCwms + err := row.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + &i.CwmsTimeseriesID, + &i.CwmsOfficeID, + &i.CwmsExtentEarliestTime, + &i.CwmsExtentLatestTime, + ) + return i, err +} + +const timeseriesCwmsList = `-- name: TimeseriesCwmsList :many +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, 
parameter_id, parameter, unit_id, unit, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time from v_timeseries_cwms +where instrument_id = $1 +` + +func (q *Queries) TimeseriesCwmsList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseriesCwms, error) { + rows, err := q.db.Query(ctx, timeseriesCwmsList, instrumentID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseriesCwms{} + for rows.Next() { + var i VTimeseriesCwms + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + &i.CwmsTimeseriesID, + &i.CwmsOfficeID, + &i.CwmsExtentEarliestTime, + &i.CwmsExtentLatestTime, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const timeseriesCwmsUpdate = `-- name: TimeseriesCwmsUpdate :exec +update timeseries_cwms set + cwms_timeseries_id=$2, + cwms_office_id=$3, + cwms_extent_earliest_time=$4, + cwms_extent_latest_time=$5 +where timeseries_id=$1 +` + +type TimeseriesCwmsUpdateParams struct { + TimeseriesID uuid.UUID `json:"timeseries_id"` + CwmsTimeseriesID string `json:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time"` +} + +func (q *Queries) TimeseriesCwmsUpdate(ctx context.Context, arg TimeseriesCwmsUpdateParams) error { + _, err := q.db.Exec(ctx, timeseriesCwmsUpdate, + arg.TimeseriesID, + arg.CwmsTimeseriesID, + arg.CwmsOfficeID, + arg.CwmsExtentEarliestTime, + arg.CwmsExtentLatestTime, + ) + return err +} diff --git a/api/internal/db/timeseries_process.manual.go b/api/internal/db/timeseries_process.manual.go new file mode 100644 index 00000000..97a19ac2 --- /dev/null +++ 
b/api/internal/db/timeseries_process.manual.go @@ -0,0 +1,426 @@ +package db + +import ( + "context" + "encoding/json" + "fmt" + "math" + "strconv" + "strings" + "time" + + "github.com/Knetic/govaluate" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/tidwall/btree" +) + +type ProcessTimeseriesInfo struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Variable string `json:"variable" db:"variable"` + IsComputed bool `json:"is_computed" db:"is_computed"` + Formula *string `json:"formula" db:"formula"` +} + +type DBProcessTimeseries struct { + ProcessTimeseriesInfo + Measurements string `json:"measurements" db:"measurements"` + NextMeasurementLow *string `json:"next_measurement_low" db:"next_measurement_low"` + NextMeasurementHigh *string `json:"next_measurement_high" db:"next_measurement_high"` +} + +type ProcessTimeseries struct { + ProcessTimeseriesInfo + Measurements []ProcessMeasurement `json:"measurements" db:"measurements"` + NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` + NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` + TimeWindow util.TimeWindow `json:"time_window" db:"-"` +} + +type ProcessMeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []ProcessMeasurement `json:"items"` +} + +type ProcessTimeseriesResponseCollection []ProcessTimeseries + +type ProcessMeasurement struct { + Time time.Time `json:"time" db:"time"` + Value float64 `json:"value" db:"value"` + Error string `json:"error,omitempty" db:"-"` +} + +func (m ProcessMeasurement) Lean() map[time.Time]float64 { + return map[time.Time]float64{m.Time: m.Value} +} + +// ProcessMeasurementFilter for conveniently passsing SQL query paramters to functions +type ProcessMeasurementFilter 
// FloatNanInf is a float64 whose JSON encoding tolerates the non-finite
// values NaN and +/-Inf, which encoding/json rejects for a plain float64
// (json.Marshal returns an UnsupportedValueError for them).
type FloatNanInf float64

// MarshalJSON encodes the value as a JSON number. NaN and +/-Inf have no
// JSON number representation, so they encode as null.
func (j FloatNanInf) MarshalJSON() ([]byte, error) {
	f := float64(j)
	if math.IsNaN(f) || math.IsInf(f, 0) {
		return []byte("null"), nil
	}
	// json.Marshal emits the shortest representation that round-trips the
	// exact float64. The previous fmt.Sprintf("%f", ...) silently truncated
	// every value to six decimal places, corrupting high-precision
	// measurement values.
	return json.Marshal(f)
}

// UnmarshalJSON decodes a JSON number, additionally accepting the sentinel
// spellings "nan"/"inf" (quoted or bare, any case) plus null/empty/undefined.
// Unparseable input deliberately decodes to NaN instead of returning an
// error, preserving best-effort ingest of ragged measurement data.
func (j *FloatNanInf) UnmarshalJSON(v []byte) error {
	switch strings.ToLower(string(v)) {
	case `"nan"`, "nan", "", "null", "undefined":
		*j = FloatNanInf(math.NaN())
	case `"inf"`, "inf":
		*j = FloatNanInf(math.Inf(1))
	default:
		var fv float64
		if err := json.Unmarshal(v, &fv); err != nil {
			*j = FloatNanInf(math.NaN())
			return nil
		}
		*j = FloatNanInf(fv)
	}
	return nil
}
tmp := make(map[uuid.UUID]map[uuid.UUID][]MeasurementLean) + + for _, t := range *mrc { + if _, hasInstrument := tmp[t.InstrumentID]; !hasInstrument { + tmp[t.InstrumentID] = make(map[uuid.UUID][]MeasurementLean, 0) + } + if _, hasTimeseries := tmp[t.InstrumentID][t.TimeseriesID]; !hasTimeseries { + tmp[t.InstrumentID][t.TimeseriesID] = make([]MeasurementLean, 0) + } + for _, m := range t.Measurements { + tmp[t.InstrumentID][t.TimeseriesID] = append(tmp[t.InstrumentID][t.TimeseriesID], MeasurementLean{m.Time: m.Value}) + } + } + + res := make(map[uuid.UUID][]MeasurementCollectionLean) + + for instrumentID := range tmp { + res[instrumentID] = make([]MeasurementCollectionLean, 0) + + for tsID := range tmp[instrumentID] { + res[instrumentID] = append(res[instrumentID], + MeasurementCollectionLean{ + TimeseriesID: tsID, + Items: LTTB(tmp[instrumentID][tsID], threshold), + }, + ) + } + } + + return res, nil +} + +func (mrc *ProcessTimeseriesResponseCollection) CollectSingleTimeseries(threshold int, tsID uuid.UUID) (MeasurementCollection, error) { + if len(*mrc) == 0 { + return MeasurementCollection{ + TimeseriesID: tsID, + Items: make([]Measurement, 0), + }, nil + } + + for _, t := range *mrc { + if t.TimeseriesID == tsID { + mmts := make([]Measurement, len(t.Measurements)) + for i, m := range t.Measurements { + mmts[i] = Measurement{ + Time: m.Time, + Value: m.Value, + Error: m.Error, + } + } + return MeasurementCollection{TimeseriesID: t.TimeseriesID, Items: LTTB(mmts, threshold)}, nil + } + } + + return MeasurementCollection{}, fmt.Errorf("requested timeseries does not match any in the result") +} + +// SelectMeasurements returns measurements for the timeseries specified in the filter +func (q *Queries) ProcessMeasurementListDynamic(ctx context.Context, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { + tss, err := queryTimeseriesMeasurements(ctx, q, f) + if err != nil { + return tss, err + } + tss, err = processLOCF(tss) + if err != nil { + 
return tss, err + } + return tss, nil +} + +// collectAggregate creates a btree of all sorted times (key) and measurements (value; as variable map) from an array of Timeseries +func collectAggregate(tss *ProcessTimeseriesResponseCollection) *btree.BTreeG[BTreeNode] { + // Get unique set of all measurement times of timeseries dependencies for non-regularized values + btm := btree.NewBTreeG(func(a, b BTreeNode) bool { return a.Key.Before(b.Key) }) + for _, ts := range *tss { + if ts.NextMeasurementLow != nil { + if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementLow.Time}); !exists { + btm.Set(BTreeNode{Key: ts.NextMeasurementLow.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementLow.Value}}) + } else { + item.Value[ts.Variable] = ts.NextMeasurementLow.Value + btm.Set(item) + } + } + for _, m := range ts.Measurements { + if item, exists := btm.Get(BTreeNode{Key: m.Time}); !exists { + btm.Set(BTreeNode{Key: m.Time, Value: map[string]interface{}{ts.Variable: m.Value}}) + } else { + item.Value[ts.Variable] = m.Value + btm.Set(item) + } + } + if ts.NextMeasurementHigh != nil { + if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementHigh.Time}); !exists { + btm.Set(BTreeNode{Key: ts.NextMeasurementHigh.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementHigh.Value}}) + } else { + item.Value[ts.Variable] = ts.NextMeasurementHigh.Value + btm.Set(item) + } + } + } + return btm +} + +// processLOCF calculates computed timeseries using "Last-Observation-Carried-Forward" algorithm +func processLOCF(tss ProcessTimeseriesResponseCollection) (ProcessTimeseriesResponseCollection, error) { + tssFinal := make(ProcessTimeseriesResponseCollection, 0) + var variableMap *btree.BTreeG[BTreeNode] + // Check if any computed timeseries present, collect aggregates used for calculations if so + for _, ts := range tss { + if ts.IsComputed { + variableMap = collectAggregate(&tss) + break + } + } + // Add any stored timeseries to the result + // Do 
calculations for computed timeseries and add to result + for _, ts := range tss { + // Array of existing measurements + a1 := make([]ProcessMeasurement, 0) + if ts.NextMeasurementLow != nil { + a1 = append(a1, *ts.NextMeasurementLow) + } + a1 = append(a1, ts.Measurements...) + if ts.NextMeasurementHigh != nil { + a1 = append(a1, *ts.NextMeasurementHigh) + } + + // Could do some additional checks before adding, like if the + // timeseries was actual requested or if it was just in the result as a + // dependency of the computed timeseries, just returning them all for now + if !ts.IsComputed { + tssFinal = append(tssFinal, ProcessTimeseries{ + ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, + Measurements: a1, + TimeWindow: ts.TimeWindow, + }) + continue + } + + // By now, all of the stored timeseries have been processed; + // the query is ordered in a way that priortizes stored timeseries + expr, err := govaluate.NewEvaluableExpression(*ts.Formula) + if err != nil { + continue + } + + // Do calculations + remember := make(map[string]interface{}) + a2 := make([]ProcessMeasurement, 0) + + it := variableMap.Iter() + for it.Next() { + item := it.Item() + + // fill in any missing gaps of data + for k, v := range remember { + if _, exists := item.Value[k]; !exists { + item.Value[k] = v + } + } + // Add/Update the most recent values + for k, v := range item.Value { + remember[k] = v + } + + val, err := expr.Evaluate(item.Value) + if err != nil { + continue + } + val64, err := strconv.ParseFloat(fmt.Sprint(val), 64) + if err != nil { + continue + } + + a2 = append(a2, ProcessMeasurement{Time: item.Key, Value: val64}) + } + it.Release() + + tssFinal = append(tssFinal, ProcessTimeseries{ + ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, + Measurements: a2, + TimeWindow: ts.TimeWindow, + }) + } + + return tssFinal, nil +} + +// queryTimeseriesMeasurements selects stored measurements and dependencies for computed measurements +func queryTimeseriesMeasurements(ctx 
context.Context, q *Queries, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { + var filterSQL string + var filterArg interface{} + // short circuiting before executing SQL query greatly improves query perfomance, + // rather than adding all parameters to the query with logical OR + switch { + case f.TimeseriesID != nil: + filterSQL = `id=$1` + filterArg = f.TimeseriesID + case f.InstrumentID != nil: + filterSQL = `instrument_id=$1` + filterArg = f.InstrumentID + case f.InstrumentGroupID != nil: + filterSQL = ` + instrument_id = any( + SELECT instrument_id + FROM instrument_group_instruments + WHERE instrument_group_id=$1 + )` + filterArg = f.InstrumentGroupID + case len(f.InstrumentIDs) > 0: + filterSQL = `instrument_id = any($1)` + filterArg = f.InstrumentIDs + case len(f.TimeseriesIDs) > 0: + filterSQL = `id = any($1)` + filterArg = f.TimeseriesIDs + default: + return nil, fmt.Errorf("must supply valid filter for timeseries_measurement query") + } + + listTimeseriesMeasurments := ` + with required_timeseries as ( + ( + select id + from v_timeseries_stored + where ` + filterSQL + ` + ) + union all + ( + select dependency_timeseries_id as id + from v_timeseries_dependency + where ` + filterSQL + ` + ) + ), next_low as ( + select nlm.timeseries_id as timeseries_id, json_build_object('time', nlm.time, 'value', m1.value) measurement + from ( + select timeseries_id, max("time") "time" + from timeseries_measurement + where timeseries_id in (select id from required_timeseries) and "time" < $2 + group by timeseries_id + ) nlm + inner join timeseries_measurement m1 on m1.time = nlm.time and m1.timeseries_id = nlm.timeseries_id + ), next_high as ( + select nhm.timeseries_id as timeseries_id, json_build_object('time', nhm.time, 'value', m2.value) measurement + from ( + select timeseries_id, min("time") "time" + from timeseries_measurement + where timeseries_id in (select id from required_timeseries) and "time" > $3 + group by timeseries_id + ) nhm + 
inner join timeseries_measurement m2 on m2.time = nhm.time and m2.timeseries_id = nhm.timeseries_id + ) + ( + select + rt.id timeseries_id, + ts.instrument_id, + i.slug || '.' || ts.slug variable, + false is_computed, + null formula, + coalesce(( + select json_agg(json_build_object('time', "time", 'value', value) order by time asc) + from timeseries_measurement + where timeseries_id = rt.id and "time" >= $2 and "time" <= $3 + ), '[]'::json) measurements, + nl.measurement next_measurement_low, + nh.measurement next_measurement_high + from required_timeseries rt + inner join timeseries ts on ts.id = rt.id + inner join instrument i on i.id = ts.instrument_id + left join next_low nl on nl.timeseries_id = rt.id + left join next_high nh on nh.timeseries_id = rt.id + ) + union all + ( + select + id timeseries_id, + instrument_id, + slug variable, + true is_computed, + contents formula, + '[]'::json measurements, + null next_measurement_low, + null next_measurement_high + from v_timeseries_computed + where ` + filterSQL + ` + and contents is not null + ) + order by is_computed + ` + + rows, err := q.db.Query(ctx, listTimeseriesMeasurments, filterArg, f.After, f.Before) + if err != nil { + return make(ProcessTimeseriesResponseCollection, 0), err + } + return pgx.CollectRows[ProcessTimeseries](rows, pgx.RowToStructByNameLax) +} diff --git a/api/internal/db/unit.sql_gen.go b/api/internal/db/unit.sql_gen.go new file mode 100644 index 00000000..930c2f45 --- /dev/null +++ b/api/internal/db/unit.sql_gen.go @@ -0,0 +1,44 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: unit.sql + +package db + +import ( + "context" +) + +const unitsList = `-- name: UnitsList :many +select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure +from v_unit +order by name +` + +func (q *Queries) UnitsList(ctx context.Context) ([]VUnit, error) { + rows, err := q.db.Query(ctx, unitsList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VUnit{} + for rows.Next() { + var i VUnit + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Abbreviation, + &i.UnitFamilyID, + &i.UnitFamily, + &i.MeasureID, + &i.Measure, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go new file mode 100644 index 00000000..7b6861cb --- /dev/null +++ b/api/internal/db/uploader.sql_gen.go @@ -0,0 +1,284 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.27.0 +// source: uploader.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const uploaderConfigCreate = `-- name: UploaderConfigCreate :one +insert into uploader_config ( + project_id, + name, + slug, + description, + created_at, + created_by, + type, + tz_name, + time_field, + validated_field_enabled, + validated_field, + masked_field_enabled, + masked_field, + comment_field_enabled, + comment_field, + column_offset, + row_offset +) +values ( + $1, + $2, + slugify($3, 'uploader_config'), + $4, + $5, + $6, + $7, + $8, + $9, + $10, + $11, + $12, + $13, + $14, + $15, + $16, + $17 +) +returning id +` + +type UploaderConfigCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` +} + +func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, uploaderConfigCreate, + arg.ProjectID, + arg.Name, + arg.Slug, + arg.Description, + arg.CreatedAt, + arg.CreatedBy, + arg.Type, + arg.TzName, + arg.TimeField, + arg.ValidatedFieldEnabled, + arg.ValidatedField, + arg.MaskedFieldEnabled, + arg.MaskedField, + arg.CommentFieldEnabled, + arg.CommentField, + arg.ColumnOffset, + arg.RowOffset, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + 
+const uploaderConfigDelete = `-- name: UploaderConfigDelete :exec +delete from uploader_config where id=$1 +` + +func (q *Queries) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, uploaderConfigDelete, id) + return err +} + +const uploaderConfigGet = `-- name: UploaderConfigGet :one +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset from v_uploader_config where id=$1 +` + +func (q *Queries) UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploaderConfig, error) { + row := q.db.QueryRow(ctx, uploaderConfigGet, id) + var i VUploaderConfig + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedAt, + &i.CreatedBy, + &i.CreatedByUsername, + &i.UpdatedBy, + &i.UpdatedAt, + &i.UpdatedByUsername, + &i.Type, + &i.TzName, + &i.TimeField, + &i.ValidatedFieldEnabled, + &i.ValidatedField, + &i.MaskedFieldEnabled, + &i.MaskedField, + &i.CommentFieldEnabled, + &i.CommentField, + &i.ColumnOffset, + &i.RowOffset, + ) + return i, err +} + +const uploaderConfigListForProject = `-- name: UploaderConfigListForProject :many +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset from v_uploader_config where project_id=$1 +` + +func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) { + rows, err := q.db.Query(ctx, uploaderConfigListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := 
[]VUploaderConfig{} + for rows.Next() { + var i VUploaderConfig + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.Slug, + &i.Name, + &i.Description, + &i.CreatedAt, + &i.CreatedBy, + &i.CreatedByUsername, + &i.UpdatedBy, + &i.UpdatedAt, + &i.UpdatedByUsername, + &i.Type, + &i.TzName, + &i.TimeField, + &i.ValidatedFieldEnabled, + &i.ValidatedField, + &i.MaskedFieldEnabled, + &i.MaskedField, + &i.CommentFieldEnabled, + &i.CommentField, + &i.ColumnOffset, + &i.RowOffset, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const uploaderConfigMappingDeleteForUploaderConfig = `-- name: UploaderConfigMappingDeleteForUploaderConfig :exec +delete from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) UploaderConfigMappingDeleteForUploaderConfig(ctx context.Context, uploaderConfigID uuid.UUID) error { + _, err := q.db.Exec(ctx, uploaderConfigMappingDeleteForUploaderConfig, uploaderConfigID) + return err +} + +const uploaderConfigMappingList = `-- name: UploaderConfigMappingList :many +select uploader_config_id, field_name, timeseries_id from uploader_config_mapping where uploader_config_id=$1 +` + +func (q *Queries) UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) { + rows, err := q.db.Query(ctx, uploaderConfigMappingList, uploaderConfigID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []UploaderConfigMapping{} + for rows.Next() { + var i UploaderConfigMapping + if err := rows.Scan(&i.UploaderConfigID, &i.FieldName, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const uploaderConfigUpdate = `-- name: UploaderConfigUpdate :exec +update uploader_config set + name=$2, + description=$3, + updated_by=$4, + updated_at=$5, + type=$6, + tz_name=$7, 
+ time_field=$8, + validated_field_enabled=$9, + validated_field=$10, + masked_field_enabled=$11, + masked_field=$12, + comment_field_enabled=$13, + comment_field=$14, + column_offset=$15, + row_offset=$16 +where id=$1 +` + +type UploaderConfigUpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` +} + +func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error { + _, err := q.db.Exec(ctx, uploaderConfigUpdate, + arg.ID, + arg.Name, + arg.Description, + arg.UpdatedBy, + arg.UpdatedAt, + arg.Type, + arg.TzName, + arg.TimeField, + arg.ValidatedFieldEnabled, + arg.ValidatedField, + arg.MaskedFieldEnabled, + arg.MaskedField, + arg.CommentFieldEnabled, + arg.CommentField, + arg.ColumnOffset, + arg.RowOffset, + ) + return err +} diff --git a/api/internal/dto/alert.go b/api/internal/dto/alert.go new file mode 100644 index 00000000..341c73a7 --- /dev/null +++ b/api/internal/dto/alert.go @@ -0,0 +1,19 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Alert struct { + Read *bool `json:"read,omitempty"` + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + Name string `json:"name"` + Body string `json:"body"` + CreatedAt time.Time `json:"created_at"` + 
Instruments []AlertConfigInstrument `json:"instruments"` +} diff --git a/api/internal/dto/alert_check.go b/api/internal/dto/alert_check.go new file mode 100644 index 00000000..d6c21f0b --- /dev/null +++ b/api/internal/dto/alert_check.go @@ -0,0 +1,53 @@ +package dto + +import ( + "github.com/google/uuid" +) + +var ( + GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") + YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") + RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") + + MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") + EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") +) + +const ( + warning = "Warning" + alert = "Alert" + reminder = "Reminder" +) + +type AlertCheck struct { + AlertConfigID uuid.UUID `db:"alert_config_id"` + SubmittalID uuid.UUID `db:"submittal_id"` + ShouldWarn bool `db:"should_warn"` + ShouldAlert bool `db:"should_alert"` + ShouldRemind bool `db:"should_remind"` + Submittal Submittal `db:"-"` +} + +func (ck AlertCheck) GetShouldWarn() bool { + return ck.ShouldWarn +} + +func (ck AlertCheck) GetShouldAlert() bool { + return ck.ShouldAlert +} + +func (ck AlertCheck) GetShouldRemind() bool { + return ck.ShouldRemind +} + +func (ck AlertCheck) GetSubmittal() Submittal { + return ck.Submittal +} + +func (ck *AlertCheck) SetSubmittal(sub Submittal) { + ck.Submittal = sub +} + +type AlertConfigMap map[uuid.UUID]AlertConfig + +type SubmittalMap map[uuid.UUID]Submittal diff --git a/api/internal/dto/alert_config.go b/api/internal/dto/alert_config.go new file mode 100644 index 00000000..81ba33bf --- /dev/null +++ b/api/internal/dto/alert_config.go @@ -0,0 +1,41 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type AlertConfig struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Body string `json:"body"` + 
ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertType string `json:"alert_type"` + StartedAt time.Time `json:"started_at"` + ScheduleInterval string `json:"schedule_interval"` + RemindInterval string `json:"remind_interval"` + WarningInterval string `json:"warning_interval"` + LastChecked *time.Time `json:"last_checked"` + LastReminded *time.Time `json:"last_reminded"` + Instruments []AlertConfigInstrument `json:"instruments"` + AlertEmailSubscriptions []EmailAutocompleteResult `json:"alert_email_subscriptions"` + MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts"` + CreateNextSubmittalFrom *time.Time `json:"-"` + AuditInfo +} + +type AlertConfigInstrument struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} + +func (a *AlertConfig) GetToAddresses() []string { + emails := make([]string, len(a.AlertEmailSubscriptions)) + for idx := range a.AlertEmailSubscriptions { + emails[idx] = a.AlertEmailSubscriptions[idx].Email + } + return emails +} diff --git a/api/internal/model/alert_evaluation_check.go b/api/internal/dto/alert_evaluation_check.go similarity index 71% rename from api/internal/model/alert_evaluation_check.go rename to api/internal/dto/alert_evaluation_check.go index 71669f0b..4bbcd970 100644 --- a/api/internal/model/alert_evaluation_check.go +++ b/api/internal/dto/alert_evaluation_check.go @@ -1,7 +1,6 @@ -package model +package dto import ( - "context" "fmt" "github.com/USACE/instrumentation-api/api/internal/config" @@ -33,7 +32,7 @@ func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { a.AlertChecks = ec } -func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { +func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { if emailType == "" { return fmt.Errorf("must provide emailType") } @@ -46,7 +45,7 
@@ func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.Alert "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Evaluation Submittals:\r\n" + "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", } templContent, err := email.CreateEmailTemplateContent(preformatted) @@ -63,19 +62,3 @@ func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg config.Alert } return nil } - -const getAllIncompleteEvaluationSubmittals = ` - SELECT * FROM v_alert_check_evaluation_submittal - WHERE submittal_id = ANY( - SELECT id FROM submittal - WHERE completion_date IS NULL AND NOT marked_as_missing - ) -` - -func (q *Queries) GetAllIncompleteEvaluationSubmittals(ctx context.Context) ([]*EvaluationCheck, error) { - ecs := make([]*EvaluationCheck, 0) - if err := q.db.SelectContext(ctx, &ecs, getAllIncompleteEvaluationSubmittals); err != nil { - return nil, err - } - return ecs, nil -} diff --git a/api/internal/model/alert_measurement_check.go b/api/internal/dto/alert_measurement_check.go similarity index 74% rename from api/internal/model/alert_measurement_check.go rename to api/internal/dto/alert_measurement_check.go index 39578be7..27032803 100644 --- a/api/internal/model/alert_measurement_check.go +++ b/api/internal/dto/alert_measurement_check.go @@ -1,7 +1,6 @@ -package model +package dto import ( - "context" "fmt" "github.com/USACE/instrumentation-api/api/internal/config" @@ -40,7 +39,7 @@ func (a *AlertConfigMeasurementCheck) SetChecks(mc []*MeasurementCheck) { a.AlertChecks = mc } -func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.AlertCheckConfig) error { 
+func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { if emailType == "" { return fmt.Errorf("must provide emailType") } @@ -53,7 +52,7 @@ func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.Alert "Description: \"{{.AlertConfig.Body}}\"\r\n" + "Expected Measurement Submittals:\r\n" + "{{range .AlertChecks}}" + - "\t• {{.Submittal.CreateDate.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + "{{range .AffectedTimeseries}}" + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + "{{end}}\r\n{{end}}", @@ -72,19 +71,3 @@ func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg config.Alert } return nil } - -const getAllIncompleteMeasurementSubmittals = ` - SELECT * FROM v_alert_check_measurement_submittal - WHERE submittal_id = ANY( - SELECT id FROM submittal - WHERE completion_date IS NULL AND NOT marked_as_missing - ) -` - -func (q *Queries) GetAllIncompleteMeasurementSubmittals(ctx context.Context) ([]*MeasurementCheck, error) { - mcs := make([]*MeasurementCheck, 0) - if err := q.db.SelectContext(ctx, &mcs, getAllIncompleteMeasurementSubmittals); err != nil { - return nil, err - } - return mcs, nil -} diff --git a/api/internal/dto/alert_subscription.go b/api/internal/dto/alert_subscription.go new file mode 100644 index 00000000..75fa77e9 --- /dev/null +++ b/api/internal/dto/alert_subscription.go @@ -0,0 +1,54 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type AlertSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + AlertSubscriptionSettings +} + +type 
AlertSubscriptionSettings struct { + MuteUI bool `json:"mute_ui" db:"mute_ui"` + MuteNotify bool `json:"mute_notify" db:"mute_notify"` +} + +type AlertSubscriptionCollection struct { + Items []AlertSubscription `json:"items"` +} + +type EmailAlert struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"profile_id"` + MuteNotify bool `json:"mute_notify" db:"mute_notify"` +} + +type Email struct { + ID uuid.UUID `json:"id" db:"id"` + Email string `json:"email" db:"email"` +} + +func (c *AlertSubscriptionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var a AlertSubscription + if err := json.Unmarshal(b, &a); err != nil { + return err + } + c.Items = []AlertSubscription{a} + default: + c.Items = make([]AlertSubscription, 0) + } + return nil +} diff --git a/api/internal/dto/autocomplete.go b/api/internal/dto/autocomplete.go new file mode 100644 index 00000000..e5b4063c --- /dev/null +++ b/api/internal/dto/autocomplete.go @@ -0,0 +1,12 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type EmailAutocompleteResult struct { + ID uuid.UUID `json:"id"` + UserType string `json:"user_type" db:"user_type"` + Username *string `json:"username"` + Email string `json:"email"` +} diff --git a/api/internal/dto/aware.go b/api/internal/dto/aware.go new file mode 100644 index 00000000..dc61698e --- /dev/null +++ b/api/internal/dto/aware.go @@ -0,0 +1,25 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type AwareParameter struct { + ID uuid.UUID `json:"id"` + Key string `json:"key"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` +} + +type AwarePlatformParameterConfig struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" db:"aware_id"` + AwareParameters 
map[string]*uuid.UUID `json:"aware_parameters"` +} + +type AwarePlatformParameterEnabled struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" db:"aware_id"` + AwareParameterKey string `json:"aware_parameter_key" db:"aware_parameter_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/dto/collection_group.go b/api/internal/dto/collection_group.go new file mode 100644 index 00000000..2739283e --- /dev/null +++ b/api/internal/dto/collection_group.go @@ -0,0 +1,28 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type CollectionGroup struct { + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Slug string `json:"slug" db:"slug"` + Name string `json:"name" db:"name"` + SortOrder int32 `json:"sort_order" db:"sort_order"` + AuditInfo +} + +type CollectionGroupDetails struct { + CollectionGroup + Timeseries []collectionGroupDetailsTimeseries `json:"timeseries"` +} + +type collectionGroupDetailsTimeseries struct { + Timeseries + LatestTime *time.Time `json:"latest_time" db:"latest_time"` + LatestValue *float32 `json:"latest_value" db:"latest_value"` + SortOrder int `json:"sort_order" db:"sort_order"` +} diff --git a/api/internal/dto/common.go b/api/internal/dto/common.go new file mode 100644 index 00000000..11c880aa --- /dev/null +++ b/api/internal/dto/common.go @@ -0,0 +1,126 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type AuditInfo struct { + CreatedBy uuid.UUID `json:"created_by"` + CreatedByUsername *string `json:"created_by_username,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedByUsername *string `json:"updated_by_username,omitempty"` + UpdatedAt *time.Time `json:"updatedd_at"` +} + +type Opts map[string]interface{} + +func (o *Opts) Scan(src interface{}) error { + b, ok := 
src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + +type IDSlug struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` +} + +type IDName struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + +type IDSlugName struct { + IDSlug + Name string `json:"name,omitempty"` +} + +type InstrumentIDName struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} + +type DataloggerEquivalencyTableField struct { + ID uuid.UUID `json:"id"` + FieldName string `json:"field_name"` + DisplayName string `json:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type DomainGroupOpt struct { + ID uuid.UUID `json:"id" db:"id"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type AlertEmailSubscription struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + EmailID uuid.UUID `json:"email_id"` + MuteNotify bool `json:"mute_notify"` +} + +type AlertCheckMeasurementSubmittalAffectedTimeseries struct { + InstrumentName string `json:"instrument_name"` + TimeseriesName string `json:"timeseries_name"` + Status string `json:"status"` +} + +type IpiMeasurement struct { + SegmentID int `json:"segment_id"` + Tilt *float64 `json:"tilt"` + IncDev *float64 `json:"inc_dev"` + CumDev *float64 `json:"cum_dev"` + Temp *float64 `json:"temp"` + Elelvation *float64 `json:"elevation"` +} + +type SaaMeasurement struct { + SegmentID int `json:"segment_id"` + X *float64 `json:"x"` + Y *float64 `json:"y"` + Z *float64 `json:"z"` + Temp *float64 `json:"temp"` + XIncrement *float64 `json:"x_increment"` + YIncrement *float64 `json:"y_increment"` + ZIncrement *float64 `json:"z_increment"` + TempIncrement *float64 `json:"temp_increment"` + XCumDev *float64 `json:"x_cum_dev"` + YCumDev *float64 `json:"y_cum_dev"` + ZCumDev 
*float64 `json:"z_cum_dev"` + TempCumDev *float64 `json:"temp_cum_dev"` + Elevation *float64 `json:"elevation"` +} + +type ReportConfigGlobalOverrides struct { + DateRange TextOption `json:"date_range"` + ShowMasked ToggleOption `json:"show_masked"` + ShowNonvalidated ToggleOption `json:"show_nonvalidated"` +} + +type TextOption struct { + Enabled bool `json:"enabled"` + Value string `json:"value"` +} + +type ToggleOption struct { + Enabled bool `json:"enabled"` + Value bool `json:"value"` +} + +type IDSlugCollection struct { + Items []IDSlug `json:"items"` +} + +// Shortener allows a shorter representation of an object. Typically, ID and Slug fields +type Shortener[T any] interface { + Shorten() T +} diff --git a/api/internal/dto/datalogger.go b/api/internal/dto/datalogger.go new file mode 100644 index 00000000..ef711b7e --- /dev/null +++ b/api/internal/dto/datalogger.go @@ -0,0 +1,51 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgtype" +) + +type Telemetry struct { + ID uuid.UUID + TypeID string + TypeSlug string + TypeName string +} + +type Datalogger struct { + ID uuid.UUID `json:"id" db:"id"` + Name string `json:"name" db:"name"` + SN string `json:"sn" db:"sn"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Slug string `json:"slug" db:"slug"` + ModelID uuid.UUID `json:"model_id" db:"model_id"` + Model *string `json:"model" db:"model"` + Errors []string `json:"errors" db:"-"` + PgErrors pgtype.TextArray `json:"-" db:"errors"` + Tables dbJSONSlice[DataloggerTable] `json:"tables" db:"tables"` + AuditInfo +} + +type DataloggerWithKey struct { + Datalogger + Key string `json:"key"` +} + +type DataloggerTable struct { + ID uuid.UUID `json:"id" db:"id"` + TableName string `json:"table_name" db:"table_name"` +} + +type DataloggerTablePreview struct { + DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` + UpdatedAt time.Time `json:"updated_at"` + Preview 
json.RawMessage `json:"preview" db:"preview"` +} + +type DataloggerError struct { + DataloggerTableID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` + Errors []string `json:"errors" db:"errors"` +} diff --git a/api/internal/model/datalogger_parser.go b/api/internal/dto/datalogger_parser.go similarity index 99% rename from api/internal/model/datalogger_parser.go rename to api/internal/dto/datalogger_parser.go index 411404f7..b7605d73 100644 --- a/api/internal/model/datalogger_parser.go +++ b/api/internal/dto/datalogger_parser.go @@ -1,4 +1,4 @@ -package model +package dto import ( "encoding/csv" diff --git a/api/internal/dto/db.go b/api/internal/dto/db.go new file mode 100644 index 00000000..36cd0b0f --- /dev/null +++ b/api/internal/dto/db.go @@ -0,0 +1,41 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/lib/pq" +) + +type dbSlice[T any] []T + +func (d *dbSlice[T]) Scan(src interface{}) error { + value := make([]T, 0) + if err := pq.Array(&value).Scan(src); err != nil { + return err + } + *d = dbSlice[T](value) + return nil +} + +type dbJSONSlice[T any] []T + +func (d *dbJSONSlice[T]) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("failed type assertion") + } + return json.Unmarshal([]byte(b), d) +} + +func MapToStruct[T any](v map[string]interface{}) (T, error) { + var o T + s, err := json.Marshal(v) + if err != nil { + return o, err + } + if err := json.Unmarshal(s, &o); err != nil { + return o, err + } + return o, nil +} diff --git a/api/internal/dto/district_rollup.go b/api/internal/dto/district_rollup.go new file mode 100644 index 00000000..7e223289 --- /dev/null +++ b/api/internal/dto/district_rollup.go @@ -0,0 +1,21 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type DistrictRollup struct { + AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` + DistrictInitials *string `json:"district_initials" 
db:"district_initials"` + ProjectName string `json:"project_name" db:"project_name"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Month time.Time `json:"month" db:"month"` + ExpectedTotalSubmittals int `json:"expected_total_submittals" db:"expected_total_submittals"` + ActualTotalSubmittals int `json:"actual_total_submittals" db:"actual_total_submittals"` + RedSubmittals int `json:"red_submittals" db:"red_submittals"` + YellowSubmittals int `json:"yellow_submittals" db:"yellow_submittals"` + GreenSubmittals int `json:"green_submittals" db:"green_submittals"` +} diff --git a/api/internal/dto/domain.go b/api/internal/dto/domain.go new file mode 100644 index 00000000..11224dff --- /dev/null +++ b/api/internal/dto/domain.go @@ -0,0 +1,27 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type Domain struct { + ID uuid.UUID `json:"id" db:"id"` + Group string `json:"group" db:"group"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type DomainGroup struct { + Group string `json:"group" db:"group"` + Opts dbJSONSlice[DomainGroupOption] `json:"opts" db:"opts"` +} + +type DomainGroupOption struct { + ID uuid.UUID `json:"id" db:"id"` + Value string `json:"value" db:"value"` + Description *string `json:"description" db:"description"` +} + +type DomainGroupCollection []DomainGroup + +type DomainMap map[string][]DomainGroupOption diff --git a/api/internal/dto/equivalency_table.go b/api/internal/dto/equivalency_table.go new file mode 100644 index 00000000..a1270cac --- /dev/null +++ b/api/internal/dto/equivalency_table.go @@ -0,0 +1,20 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type EquivalencyTable struct { + DataloggerID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` + DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` + DataloggerTableName string `json:"datalogger_table_name" db:"datalogger_table_name"` + Rows 
dbJSONSlice[EquivalencyTableRow] `json:"rows" db:"fields"` +} + +type EquivalencyTableRow struct { + ID uuid.UUID `json:"id" db:"id"` + FieldName string `json:"field_name" db:"field_name"` + DisplayName string `json:"display_name" db:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id" db:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/model/error.go b/api/internal/dto/error.go similarity index 98% rename from api/internal/model/error.go rename to api/internal/dto/error.go index ac355381..88111bb7 100644 --- a/api/internal/model/error.go +++ b/api/internal/dto/error.go @@ -1,4 +1,4 @@ -package model +package dto import ( "fmt" diff --git a/api/internal/dto/evaluation.go b/api/internal/dto/evaluation.go new file mode 100644 index 00000000..a8c4cae1 --- /dev/null +++ b/api/internal/dto/evaluation.go @@ -0,0 +1,27 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Evaluation struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + AlertConfigID *uuid.UUID `json:"alert_config_id"` + AlertConfigName *string `json:"alert_config_name"` + SubmittalID *uuid.UUID `json:"submittal_id"` + Name string `json:"name"` + Body string `json:"body"` + StartedAt time.Time `json:"started_at"` + EndedAt time.Time `json:"ended_at"` + Instruments []EvaluationInstrument `json:"instruments"` + AuditInfo +} + +type EvaluationInstrument struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` +} diff --git a/api/internal/dto/heartbeat.go b/api/internal/dto/heartbeat.go new file mode 100644 index 00000000..2a606463 --- /dev/null +++ b/api/internal/dto/heartbeat.go @@ -0,0 +1,9 @@ +package dto + +import ( + "time" +) + +type Heartbeat struct { + Time time.Time `json:"time"` +} diff --git a/api/internal/dto/home.go b/api/internal/dto/home.go new file mode 100644 index 
00000000..795ce63c --- /dev/null +++ b/api/internal/dto/home.go @@ -0,0 +1,9 @@ +package dto + +type Home struct { + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + InstrumentGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` + ProjectCount int `json:"project_count" db:"project_count"` + NewInstruments7D int `json:"new_instruments_7d" db:"new_instruments_7d"` + NewMeasurements2H int `json:"new_measurements_2h" db:"new_measurements_2h"` +} diff --git a/api/internal/dto/instrument.go b/api/internal/dto/instrument.go new file mode 100644 index 00000000..41e6bfa6 --- /dev/null +++ b/api/internal/dto/instrument.go @@ -0,0 +1,59 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/google/uuid" +) + +type Instrument struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + AwareID *uuid.UUID `json:"aware_id,omitempty"` + Groups []uuid.UUID `json:"groups"` + Constants []uuid.UUID `json:"constants"` + AlertConfigs []uuid.UUID `json:"alert_configs"` + StatusID uuid.UUID `json:"status_id"` + Status string `json:"status"` + StatusTime time.Time `json:"status_time"` + Deleted bool `json:"-"` + TypeID uuid.UUID `json:"type_id"` + Type string `json:"type"` + Icon *string `json:"icon"` + Geometry json.RawMessage `json:"geometry,omitempty"` + Station *int32 `json:"station"` + StationOffset *int32 `json:"offset"` + Projects []IDSlugName `json:"projects"` + NIDID *string `json:"nid_id"` + USGSID *string `json:"usgs_id"` + HasCwms bool `json:"has_cwms"` + ShowCwmsTab bool `json:"show_cwms_tab"` + Opts Opts `json:"opts"` + AuditInfo +} + +// InstrumentCollection is a collection of Instrument items +type InstrumentCollection []Instrument + +// Shorten returns an instrument collection with individual objects limited to ID and Slug fields +func (ic InstrumentCollection) Shorten() IDSlugCollection { + ss := IDSlugCollection{Items: make([]IDSlug, 0)} + for _, n := range ic { + s :=
IDSlug{ID: n.ID, Slug: n.Slug} + + ss.Items = append(ss.Items, s) + } + return ss +} + +type InstrumentCount struct { + InstrumentCount int `json:"instrument_count"` +} + +type InstrumentsProjectCount struct { + InstrumentID uuid.UUID `json:"instrument_id"` + InstrumentName string `json:"instrument_name"` + ProjectCount int `json:"project_count"` +} diff --git a/api/internal/dto/instrument_assign.go b/api/internal/dto/instrument_assign.go new file mode 100644 index 00000000..d9ad0d1b --- /dev/null +++ b/api/internal/dto/instrument_assign.go @@ -0,0 +1,28 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type ReasonCode int + +const ( + None ReasonCode = iota + Unauthorized + InvalidName + InvalidUnassign +) + +type InstrumentsValidation struct { + ReasonCode ReasonCode `json:"-"` + IsValid bool `json:"is_valid"` + Errors []string `json:"errors"` +} + +type ProjectInstrumentAssignments struct { + InstrumentIDs []uuid.UUID `json:"instrument_ids"` +} + +type InstrumentProjectAssignments struct { + ProjectIDs []uuid.UUID `json:"project_ids"` +} diff --git a/api/internal/dto/instrument_group.go b/api/internal/dto/instrument_group.go new file mode 100644 index 00000000..ec4dbc9a --- /dev/null +++ b/api/internal/dto/instrument_group.go @@ -0,0 +1,51 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type InstrumentGroup struct { + ID uuid.UUID `json:"id"` + Deleted bool `json:"-"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID *uuid.UUID `json:"project_id" db:"project_id"` + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + TimeseriesCount int `json:"timeseries_count" db:"timeseries_count"` + AuditInfo +} + +type InstrumentGroupCollection struct { + Items []InstrumentGroup +} + +func (c InstrumentGroupCollection) Shorten() IDSlugCollection { + ss := IDSlugCollection{Items: make([]IDSlug, 0)} 
+ for _, n := range c.Items { + s := IDSlug{ID: n.ID, Slug: n.Slug} + ss.Items = append(ss.Items, s) + } + return ss +} + +func (c *InstrumentGroupCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var g InstrumentGroup + if err := json.Unmarshal(b, &g); err != nil { + return err + } + c.Items = []InstrumentGroup{g} + default: + c.Items = make([]InstrumentGroup, 0) + } + return nil +} diff --git a/api/internal/dto/instrument_incl.go b/api/internal/dto/instrument_incl.go new file mode 100644 index 00000000..bd708a8c --- /dev/null +++ b/api/internal/dto/instrument_incl.go @@ -0,0 +1,29 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + InclParameterID = uuid.MustParse("3ea5ed77-c926-4696-a580-a3fde0f9a556") +) + +type InclOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type InclSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + DepthTimeseriesID *uuid.UUID `json:"depth_timeseries_id" db:"depth_timeseries_id"` + A0TimeseriesID *uuid.UUID `json:"a0_timeseries_id" db:"a0_timeseries_id"` + A180TimeseriesID *uuid.UUID `json:"a180_timeseries_id" db:"a180_timeseries_id"` + B0TimeseriesID *uuid.UUID `json:"b0_timeseries_id" db:"b0_timeseries_id"` + B180TimeseriesID *uuid.UUID `json:"b180_timeseries_id" db:"b180_timeseries_id"` +} diff --git a/api/internal/dto/instrument_ipi.go b/api/internal/dto/instrument_ipi.go new file mode 100644 index 00000000..d4213bc6 --- /dev/null +++ b/api/internal/dto/instrument_ipi.go @@ -0,0 +1,29 
@@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + IpiParameterID = uuid.MustParse("a9a5ad45-b2e5-4744-816e-d3184f2c08bd") +) + +type IpiOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type IpiSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Length *float64 `json:"length" db:"length"` + LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` + TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id" db:"tilt_timeseries_id"` + IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id" db:"inc_dev_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` +} diff --git a/api/internal/dto/instrument_note.go b/api/internal/dto/instrument_note.go new file mode 100644 index 00000000..b0781423 --- /dev/null +++ b/api/internal/dto/instrument_note.go @@ -0,0 +1,40 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type InstrumentNote struct { + ID uuid.UUID `json:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Title string `json:"title"` + Body string `json:"body"` + Time time.Time `json:"time"` + AuditInfo +} + +type InstrumentNoteCollection struct { + Items []InstrumentNote +} + +func (c *InstrumentNoteCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var n InstrumentNote + if err := json.Unmarshal(b, &n); err != nil { + return err 
+ } + c.Items = []InstrumentNote{n} + default: + c.Items = make([]InstrumentNote, 0) + } + return nil +} diff --git a/api/internal/dto/instrument_saa.go b/api/internal/dto/instrument_saa.go new file mode 100644 index 00000000..68a5d1d5 --- /dev/null +++ b/api/internal/dto/instrument_saa.go @@ -0,0 +1,30 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +var ( + SaaParameterID = uuid.MustParse("6d12ca4c-b618-41cd-87a2-a248980a0d69") +) + +type SaaOpts struct { + InstrumentID uuid.UUID `json:"-" db:"instrument_id"` + NumSegments int `json:"num_segments" db:"num_segments"` + BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` + BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` + InitialTime *time.Time `json:"initial_time" db:"initial_time"` +} + +type SaaSegment struct { + ID int `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + Length *float64 `json:"length" db:"length"` + LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` + XTimeseriesID *uuid.UUID `json:"x_timeseries_id" db:"x_timeseries_id"` + YTimeseriesID *uuid.UUID `json:"y_timeseries_id" db:"y_timeseries_id"` + ZTimeseriesID *uuid.UUID `json:"z_timeseries_id" db:"z_timeseries_id"` + TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` +} diff --git a/api/internal/dto/instrument_status.go b/api/internal/dto/instrument_status.go new file mode 100644 index 00000000..52521ada --- /dev/null +++ b/api/internal/dto/instrument_status.go @@ -0,0 +1,39 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type InstrumentStatus struct { + ID uuid.UUID `json:"id"` + Time time.Time `json:"time"` + StatusID uuid.UUID `json:"status_id" db:"status_id"` + Status string `json:"status"` +} + +type InstrumentStatusCollection struct { + 
Items []InstrumentStatus +} + +// UnmarshalJSON implements the UnmarshalJSON interface +func (c *InstrumentStatusCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var s InstrumentStatus + if err := json.Unmarshal(b, &s); err != nil { + return err + } + c.Items = []InstrumentStatus{s} + default: + c.Items = make([]InstrumentStatus, 0) + } + return nil +} diff --git a/api/internal/model/job.go b/api/internal/dto/job.go similarity index 93% rename from api/internal/model/job.go rename to api/internal/dto/job.go index 40773ffa..0e2db95f 100644 --- a/api/internal/model/job.go +++ b/api/internal/dto/job.go @@ -1,4 +1,4 @@ -package model +package dto import "github.com/google/uuid" diff --git a/api/internal/dto/measurement.go b/api/internal/dto/measurement.go new file mode 100644 index 00000000..6365f88d --- /dev/null +++ b/api/internal/dto/measurement.go @@ -0,0 +1,74 @@ +package dto + +import ( + "encoding/json" + "fmt" + "math" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +type TimeseriesMeasurementCollectionCollection struct { + Items []MeasurementCollection +} + +func (cc *TimeseriesMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &cc.Items); err != nil { + return err + } + case "OBJECT": + var mc MeasurementCollection + if err := json.Unmarshal(b, &mc); err != nil { + return err + } + cc.Items = []MeasurementCollection{mc} + default: + cc.Items = make([]MeasurementCollection, 0) + } + return nil +} + +type Measurement struct { + TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` + Time time.Time `json:"time"` + Value FloatNanInf `json:"value"` + Error string `json:"error,omitempty"` + TimeseriesNote +} + +type FloatNanInf float64 + +func (j FloatNanInf) MarshalJSON() ([]byte, error) 
{ + if math.IsNaN(float64(j)) || math.IsInf(float64(j), 0) { + return []byte("null"), nil + } + + return []byte(fmt.Sprintf("%f", float64(j))), nil +} + +func (j *FloatNanInf) UnmarshalJSON(v []byte) error { + switch strings.ToLower(string(v)) { + case `"nan"`, "nan", "", "null", "undefined": + *j = FloatNanInf(math.NaN()) + case `"inf"`, "inf": + *j = FloatNanInf(math.Inf(1)) + default: + var fv float64 + if err := json.Unmarshal(v, &fv); err != nil { + *j = FloatNanInf(math.NaN()) + return nil + } + *j = FloatNanInf(fv) + } + return nil +} + +type MeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []Measurement `json:"items"` +} diff --git a/api/internal/dto/measurement_inclinometer.go b/api/internal/dto/measurement_inclinometer.go new file mode 100644 index 00000000..96cafc53 --- /dev/null +++ b/api/internal/dto/measurement_inclinometer.go @@ -0,0 +1,84 @@ +package dto + +import ( + "encoding/json" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/jmoiron/sqlx/types" +) + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurement struct { + TimeseriesID uuid.UUID `json:"-"` + Time time.Time `json:"time"` + Values types.JSONText `json:"values"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementValues struct { + Depth int `json:"depth" db:"depth"` + A0 float32 `json:"a0" db:"a0"` + A180 float32 `json:"a180" db:"a180"` + B0 float32 `json:"b0" db:"b0"` + B180 float32 `json:"b180" db:"b180"` + AChecksum float32 `json:"aChecksum" db:"a_checksum"` + AComb float32 `json:"aComb" db:"a_comb"` + AIncrement float32 `json:"aIncrement" db:"a_increment"` + ACumDev float32 `json:"aCumDev" db:"a_cum_dev"` + 
BChecksum float32 `json:"bChecksum" db:"b_checksum"` + BComb float32 `json:"bComb" db:"b_comb"` + BIncrement float32 `json:"bIncrement" db:"b_increment"` + BCumDev float32 `json:"bCumDev" db:"b_cum_dev"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementLean map[time.Time]types.JSONText + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollection struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Inclinometers []InclinometerMeasurement `json:"inclinometers"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollectionLean struct { + TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + Items []InclinometerMeasurementLean `json:"items"` +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +type InclinometerMeasurementCollectionCollection struct { + Items []InclinometerMeasurementCollection +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +func (cc *InclinometerMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { + dd := make(map[uuid.UUID]struct{}) + for _, item := range cc.Items { + dd[item.TimeseriesID] = struct{}{} + } + return dd +} + +// DEPRECATED: please use VInclMeasurement (created from regular timeseries through InclOpts and InclSegments) +func (cc *InclinometerMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &cc.Items); err != nil { + return err + } + case "OBJECT": + var mc InclinometerMeasurementCollection + if err := json.Unmarshal(b, &mc); err != nil { + return err + } + cc.Items = 
[]InclinometerMeasurementCollection{mc} + default: + cc.Items = make([]InclinometerMeasurementCollection, 0) + } + return nil +} diff --git a/api/internal/dto/plot_config.go b/api/internal/dto/plot_config.go new file mode 100644 index 00000000..b0107eff --- /dev/null +++ b/api/internal/dto/plot_config.go @@ -0,0 +1,73 @@ +package dto + +import ( + "fmt" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +const ( + ScatterLinePlotType = "scatter-line" + ProfilePlotType = "profile" + ContourPlotType = "contour" + BullseyePlotType = "bullseye" +) + +type PlotConfig struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Slug string `json:"slug"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + ReportConfigs dbJSONSlice[IDSlugName] `json:"report_configs" db:"report_configs"` + PlotType string `json:"plot_type" db:"plot_type"` + Display Opts `json:"display" db:"display"` + PlotConfigSettings + AuditInfo +} + +// PlotConfigSettings describes options for displaying the plot consistently. +// Specifically, whether to ignore data entries in a timeseries that have been masked, +// or whether to display user comments. +type PlotConfigSettings struct { + ShowMasked bool `json:"show_masked" db:"show_masked"` + ShowNonValidated bool `json:"show_nonvalidated" db:"show_nonvalidated"` + ShowComments bool `json:"show_comments" db:"show_comments"` + AutoRange bool `json:"auto_range" db:"auto_range"` + DateRange string `json:"date_range" db:"date_range"` + Threshold int `json:"threshold" db:"threshold"` +} + +// DateRangeTimeWindow creates a TimeWindow from a date range string. +// +// Acceptable date range strings are "lifetime", "5 years", "1 year", or a fixed date in the +// format "YYYY-MM-DD YYYY-MM-DD" with after and before dates separated by a single whitespace. 
+func (pc *PlotConfig) DateRangeTimeWindow() (util.TimeWindow, error) { + switch dr := strings.ToLower(pc.DateRange); dr { + case "lifetime": + return util.TimeWindow{After: time.Time{}, Before: time.Now()}, nil + case "5 years": + return util.TimeWindow{After: time.Now().AddDate(-5, 0, 0), Before: time.Now()}, nil + case "1 year": + return util.TimeWindow{After: time.Now().AddDate(-1, 0, 0), Before: time.Now()}, nil + case "1 month": + return util.TimeWindow{After: time.Now().AddDate(0, -1, 0), Before: time.Now()}, nil + default: + cdr := strings.Split(dr, " ") + invalidDateErr := fmt.Errorf("invalid date range; custom date range must be in format \"YYYY-MM-DD YYYY-MM-DD\"") + if len(cdr) != 2 { + return util.TimeWindow{}, invalidDateErr + } + after, err := time.Parse("2006-01-02", cdr[0]) + if err != nil { + return util.TimeWindow{}, invalidDateErr + } + before, err := time.Parse("2006-01-02", cdr[1]) + if err != nil { + return util.TimeWindow{}, invalidDateErr + } + return util.TimeWindow{After: after, Before: before}, nil + } +} diff --git a/api/internal/dto/plot_config_bullseye.go b/api/internal/dto/plot_config_bullseye.go new file mode 100644 index 00000000..65ed8fff --- /dev/null +++ b/api/internal/dto/plot_config_bullseye.go @@ -0,0 +1,33 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type PlotConfigBullseyePlot struct { + PlotConfig + Display PlotConfigBullseyePlotDisplay `json:"display" db:"display"` +} + +type PlotConfigBullseyePlotDisplay struct { + XAxisTimeseriesID uuid.UUID `json:"x_axis_timeseries_id" db:"x_axis_timeseries_id"` + YAxisTimeseriesID uuid.UUID `json:"y_axis_timeseries_id" db:"y_axis_timeseries_id"` +} + +func (d *PlotConfigBullseyePlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigMeasurementBullseyePlot struct { + Time time.Time `json:"time" db:"time"` 
+ X *float64 `json:"x" db:"x"` + Y *float64 `json:"y" db:"y"` +} diff --git a/api/internal/dto/plot_config_contour.go b/api/internal/dto/plot_config_contour.go new file mode 100644 index 00000000..c67167cb --- /dev/null +++ b/api/internal/dto/plot_config_contour.go @@ -0,0 +1,37 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type PlotConfigContourPlot struct { + PlotConfig + Display PlotConfigContourPlotDisplay `json:"display" db:"display"` +} + +type PlotConfigContourPlotDisplay struct { + TimeseriesIDs dbSlice[uuid.UUID] `json:"timeseries_ids" db:"timeseries_ids"` + Time *time.Time `json:"time" db:"time"` + LocfBackfill string `json:"locf_backfill" db:"locf_backfill"` + GradientSmoothing bool `json:"gradient_smoothing" db:"gradient_smoothing"` + ContourSmoothing bool `json:"contour_smoothing" db:"contour_smoothing"` + ShowLabels bool `json:"show_labels" db:"show_labels"` +} + +func (d *PlotConfigContourPlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigMeasurementContourPlot struct { + X float64 `json:"x" db:"x"` + Y float64 `json:"y" db:"y"` + Z *float64 `json:"z" db:"z"` +} diff --git a/api/internal/dto/plot_config_profile.go b/api/internal/dto/plot_config_profile.go new file mode 100644 index 00000000..ec964df1 --- /dev/null +++ b/api/internal/dto/plot_config_profile.go @@ -0,0 +1,26 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/google/uuid" +) + +type PlotConfigProfilePlot struct { + PlotConfig + Display PlotConfigProfilePlotDisplay `json:"display" db:"display"` +} + +type PlotConfigProfilePlotDisplay struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentType string `json:"instrument_type,omitempty" db:"instrument_type"` +} + +func (d *PlotConfigProfilePlotDisplay) Scan(src interface{}) error { + b, ok := src.(string) + 
if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} diff --git a/api/internal/dto/plot_config_scatter_line.go b/api/internal/dto/plot_config_scatter_line.go new file mode 100644 index 00000000..7f81c825 --- /dev/null +++ b/api/internal/dto/plot_config_scatter_line.go @@ -0,0 +1,55 @@ +package dto + +import ( + "encoding/json" + "fmt" + + "github.com/google/uuid" +) + +type PlotConfigScatterLinePlot struct { + PlotConfig + Display PlotConfigScatterLineDisplay `json:"display" db:"display"` + // TODO AlertConfigIDs []string +} + +type PlotConfigScatterLineDisplay struct { + Traces []PlotConfigScatterLineTimeseriesTrace `json:"traces"` + Layout PlotConfigScatterLineLayout `json:"layout"` +} + +func (d *PlotConfigScatterLineDisplay) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), d) +} + +type PlotConfigScatterLineTimeseriesTrace struct { + PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` + TimeseriesID uuid.UUID `json:"timeseries_id"` + Name string `json:"name"` // read-only + Parameter string `json:"parameter"` // read-only + TraceOrder int `json:"trace_order"` + TraceType string `json:"trace_type"` + Color string `json:"color"` + LineStyle string `json:"line_style"` + Width float32 `json:"width"` + ShowMarkers bool `json:"show_markers"` + YAxis string `json:"y_axis"` // y1 or y2, default y1 +} + +type PlotConfigScatterLineLayout struct { + CustomShapes []PlotConfigScatterLineCustomShape `json:"custom_shapes"` + YAxisTitle *string `json:"y_axis_title"` + Y2AxisTitle *string `json:"y2_axis_title"` +} + +type PlotConfigScatterLineCustomShape struct { + PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` + Enabled bool `json:"enabled"` + Name string `json:"name"` + DataPoint float32 `json:"data_point"` + Color string `json:"color"` +} diff --git a/api/internal/dto/profile.go b/api/internal/dto/profile.go 
new file mode 100644 index 00000000..c6a00322 --- /dev/null +++ b/api/internal/dto/profile.go @@ -0,0 +1,51 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Profile struct { + ID uuid.UUID `json:"id" db:"id"` + Tokens []TokenInfoProfile `json:"tokens"` + IsAdmin bool `json:"is_admin" db:"is_admin"` + Roles dbSlice[string] `json:"roles" db:"roles"` + ProfileInfo +} + +type TokenInfoProfile struct { + TokenID string `json:"token_id" db:"token_id"` + Issued time.Time `json:"issued"` +} + +type ProfileInfo struct { + EDIPI int `json:"-" db:"edipi"` + Username string `json:"username" db:"username"` + DisplayName string `json:"display_name" db:"display_name"` + Email string `json:"email" db:"email"` +} + +type TokenInfo struct { + ID uuid.UUID `json:"-"` + TokenID string `json:"token_id" db:"token_id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + Issued time.Time `json:"issued"` + Hash string `json:"-"` +} + +// Token includes all TokenInfo and the actual token string generated for a user +// this is only returned the first time a token is generated +type Token struct { + SecretToken string `json:"secret_token"` + TokenInfo +} + +type ProfileClaims struct { + PreferredUsername string + Name string + Email string + SubjectDN *string + CacUID *int + X509Presented bool +} diff --git a/api/internal/dto/project.go b/api/internal/dto/project.go new file mode 100644 index 00000000..aab3a17c --- /dev/null +++ b/api/internal/dto/project.go @@ -0,0 +1,31 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type District struct { + Agency string `json:"agency" db:"agency"` + ID uuid.UUID `json:"id" db:"id"` + Name string `json:"name" db:"name"` + Initials string `json:"initials" db:"initials"` + DivisionName string `json:"division_name" db:"division_name"` + DivisionInitials string `json:"division_initials" db:"division_initials"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` +} + +type Project struct { + ID uuid.UUID 
`json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + FederalID *string `json:"federal_id" db:"federal_id"` + DistrictID *uuid.UUID `json:"district_id" db:"district_id"` + OfficeID *uuid.UUID `json:"office_id" db:"office_id"` + Image *string `json:"image" db:"image"` + Deleted bool `json:"-"` + InstrumentCount int `json:"instrument_count" db:"instrument_count"` + InstrumentGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` + AuditInfo +} + +type ProjectCollection []Project diff --git a/api/internal/dto/project_role.go b/api/internal/dto/project_role.go new file mode 100644 index 00000000..4e76479d --- /dev/null +++ b/api/internal/dto/project_role.go @@ -0,0 +1,14 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type ProjectMembership struct { + ID uuid.UUID `json:"id" db:"id"` + ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` + Username *string `json:"username"` + Email string `json:"email"` + RoleID uuid.UUID `json:"role_id" db:"role_id"` + Role string `json:"role"` +} diff --git a/api/internal/dto/report_config.go b/api/internal/dto/report_config.go new file mode 100644 index 00000000..f65370fe --- /dev/null +++ b/api/internal/dto/report_config.go @@ -0,0 +1,53 @@ +package dto + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/google/uuid" +) + +type ReportConfig struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Description string `json:"description"` + ProjectID uuid.UUID `json:"project_id"` + ProjectName string `json:"project_name"` + DistrictName *string `json:"district_name"` + PlotConfigs []IDSlugName `json:"plot_configs"` + GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides"` + AuditInfo +} + +type ReportDownloadJob struct { + ID uuid.UUID `json:"id"` + ReportConfigID uuid.UUID `json:"report_config_id"` + CreatedBy uuid.UUID `json:"created_by"` + CreatedAt time.Time `json:"created_at"` + Status string `json:"status"` + FileKey 
*string `json:"file_key"` + FileExpiry *time.Time `json:"file_expiry"` + Progress int `json:"progress"` + ProgressUpdatedAt time.Time `json:"progress_updated_at"` +} + +func (o *ReportConfigGlobalOverrides) Scan(src interface{}) error { + b, ok := src.(string) + if !ok { + return fmt.Errorf("type assertion failed") + } + return json.Unmarshal([]byte(b), o) +} + +type ReportConfigWithPlotConfigs struct { + ReportConfig + PlotConfigs []PlotConfigScatterLinePlot `json:"plot_configs"` +} + +type ReportConfigJobMessage struct { + ReportConfigID uuid.UUID `json:"report_config_id"` + JobID uuid.UUID `json:"job_id"` + IsLandscape bool `json:"is_landscape"` +} diff --git a/api/internal/model/search.go b/api/internal/dto/search.go similarity index 66% rename from api/internal/model/search.go rename to api/internal/dto/search.go index e41343cb..12fa396c 100644 --- a/api/internal/model/search.go +++ b/api/internal/dto/search.go @@ -1,10 +1,9 @@ -package model +package dto import ( "github.com/google/uuid" ) -// EmailAutocompleteResult stores search result in profiles and emails type SearchResult struct { ID uuid.UUID `json:"id"` Type string `json:"type"` diff --git a/api/internal/dto/submittal.go b/api/internal/dto/submittal.go new file mode 100644 index 00000000..a2538168 --- /dev/null +++ b/api/internal/dto/submittal.go @@ -0,0 +1,23 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Submittal struct { + ID uuid.UUID `json:"id"` + AlertConfigID uuid.UUID `json:"alert_config_id"` + AlertConfigName string `json:"alert_config_name"` + AlertTypeID uuid.UUID `json:"alert_type_id"` + AlertTypeName string `json:"alert_type_name"` + ProjectID uuid.UUID `json:"project_id"` + SubmittalStatusID uuid.UUID `json:"submittal_status_id"` + SubmittalStatusName string `json:"submittal_status_name"` + CompletedAt *time.Time `json:"completed_at"` + CreatedAt time.Time `json:"created_at"` + DueAt time.Time `json:"due_at"` + MarkedAsMissing bool 
`json:"marked_as_missing"` + WarningSent bool `json:"warning_sent"` +} diff --git a/api/internal/dto/timeseries.go b/api/internal/dto/timeseries.go new file mode 100644 index 00000000..bd060965 --- /dev/null +++ b/api/internal/dto/timeseries.go @@ -0,0 +1,65 @@ +package dto + +import ( + "encoding/json" + + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" +) + +const ( + StandardTimeseriesType = "standard" + ConstantTimeseriesType = "constant" + ComputedTimeseriesType = "computed" + CwmsTimeseriesType = "cwms" +) + +type Timeseries struct { + ID uuid.UUID `json:"id"` + Slug string `json:"slug"` + Name string `json:"name"` + Variable string `json:"variable"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + InstrumentSlug string `json:"instrument_slug" db:"instrument_slug"` + Instrument string `json:"instrument,omitempty"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + Parameter string `json:"parameter,omitempty"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` + Unit string `json:"unit,omitempty"` + Values []Measurement `json:"values,omitempty"` + Type string `json:"type" db:"type"` + IsComputed bool `json:"is_computed" db:"is_computed"` +} + +type TimeseriesNote struct { + Masked *bool `json:"masked,omitempty"` + Validated *bool `json:"validated,omitempty"` + Annotation *string `json:"annotation,omitempty"` +} + +type TimeseriesCollectionItems struct { + Items []Timeseries +} + +func (c *TimeseriesCollectionItems) UnmarshalJSON(b []byte) error { + switch util.JSONType(b) { + case "ARRAY": + if err := json.Unmarshal(b, &c.Items); err != nil { + return err + } + case "OBJECT": + var t Timeseries + if err := json.Unmarshal(b, &t); err != nil { + return err + } + c.Items = []Timeseries{t} + default: + c.Items = make([]Timeseries, 0) + } + return nil +} + +var ( + UnknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") + UnknownUnitID = 
uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") +) diff --git a/api/internal/dto/timeseries_calculated.go b/api/internal/dto/timeseries_calculated.go new file mode 100644 index 00000000..eda9ab27 --- /dev/null +++ b/api/internal/dto/timeseries_calculated.go @@ -0,0 +1,15 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type CalculatedTimeseries struct { + ID uuid.UUID `json:"id" db:"id"` + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` + UnitID uuid.UUID `json:"unit_id" db:"unit_id"` + Slug string `json:"slug" db:"slug"` + FormulaName string `json:"formula_name" db:"formula_name"` + Formula string `json:"formula" db:"formula"` +} diff --git a/api/internal/dto/timeseries_cwms.go b/api/internal/dto/timeseries_cwms.go new file mode 100644 index 00000000..96c957e7 --- /dev/null +++ b/api/internal/dto/timeseries_cwms.go @@ -0,0 +1,13 @@ +package dto + +import ( + "time" +) + +type TimeseriesCwms struct { + Timeseries + CwmsTimeseriesID string `json:"cwms_timeseries_id" db:"cwms_timeseries_id"` + CwmsOfficeID string `json:"cwms_office_id" db:"cwms_office_id"` + CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time" db:"cwms_extent_earliest_time"` + CwmsExtentLatestTime *time.Time `json:"cwms_extent_latest_time" db:"cwms_extent_latest_time"` +} diff --git a/api/internal/model/unit.go b/api/internal/dto/unit.go similarity index 56% rename from api/internal/model/unit.go rename to api/internal/dto/unit.go index d8377517..88896ca0 100644 --- a/api/internal/model/unit.go +++ b/api/internal/dto/unit.go @@ -1,12 +1,9 @@ -package model +package dto import ( - "context" - "github.com/google/uuid" ) -// Unit is a unit data structure type Unit struct { ID uuid.UUID `json:"id"` Name string `json:"name"` @@ -21,18 +18,3 @@ var ( MeterUnitID = uuid.MustParse("ae06a7db-1e18-4994-be41-9d5a408d6cad") FeetUnitID = uuid.MustParse("f777f2e2-5e32-424e-a1ca-19d16cd8abce") ) - 
-const listUnits = ` - SELECT id, name, abbreviation, unit_family_id, unit_family, measure_id, measure - FROM v_unit - ORDER BY name -` - -// ListUnits returns a slice of units -func (q *Queries) ListUnits(ctx context.Context) ([]Unit, error) { - uu := make([]Unit, 0) - if err := q.db.SelectContext(ctx, &uu, listUnits); err != nil { - return nil, err - } - return uu, nil -} diff --git a/api/internal/dto/uploader.go b/api/internal/dto/uploader.go new file mode 100644 index 00000000..f54ceb00 --- /dev/null +++ b/api/internal/dto/uploader.go @@ -0,0 +1,37 @@ +package dto + +import ( + "github.com/google/uuid" +) + +type UploaderConfigType string + +const ( + CSV, DUX, TOA5 UploaderConfigType = "csv", "dux", "toa5" +) + +type UploaderConfig struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` + AuditInfo +} + +type UploaderConfigMapping struct { + UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` + FieldName string `json:"field_name" db:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} diff --git a/api/internal/email/email.go b/api/internal/email/email.go index 843da825..ed5d53b1 100644 --- a/api/internal/email/email.go +++ b/api/internal/email/email.go @@ -54,7 +54,7 @@ func FormatAlertConfigTemplates(templContent EmailTemplateContent, data any) (Em }, nil } -func ConstructAndSendEmail(ec 
EmailContent, cfg config.AlertCheckConfig) error { +func ConstructAndSendEmail(ec EmailContent, cfg *config.AlertCheckConfig) error { if len(ec.To) == 0 { if cfg.EmailSendMocked { log.Print("no email subs") diff --git a/api/internal/handler/alert.go b/api/internal/handler/alert.go index a80096fb..90b7e486 100644 --- a/api/internal/handler/alert.go +++ b/api/internal/handler/alert.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -18,7 +18,7 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Alert +// @Success 200 {array} db.VAlert // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,30 +28,30 @@ func (h *ApiHandler) ListAlertsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - aa, err := h.AlertService.GetAllAlertsForInstrument(c.Request().Context(), instrumentID) + aa, err := h.DBService.AlertListForInstrument(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, aa) } -// ListMyAlerts godoc +// ListAlertsForProfile godoc // // @Summary lists subscribed alerts for a single user // @Description list all alerts a profile is subscribed to // @Tags alert // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.Alert +// @Success 200 {array} db.AlertListForProfileRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts [get] // @Security Bearer -func (h *ApiHandler) ListMyAlerts(c 
echo.Context) error { - p := c.Get("profile").(model.Profile) +func (h *ApiHandler) ListAlertsForProfile(c echo.Context) error { + p := c.Get("profile").(db.VProfile) profileID := p.ID - aa, err := h.AlertService.GetAllAlertsForProfile(c.Request().Context(), profileID) + aa, err := h.DBService.AlertListForProfile(c.Request().Context(), profileID) if err != nil { return httperr.InternalServerError(err) } @@ -67,24 +67,27 @@ func (h *ApiHandler) ListMyAlerts(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Alert +// @Success 201 {object} db.AlertGetRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/read [post] // @Security Bearer func (h *ApiHandler) DoAlertRead(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertService.DoAlertRead(c.Request().Context(), profileID, alertID) + a, err := h.DBService.AlertReadCreate(c.Request().Context(), db.AlertReadCreateParams{ + ProfileID: profileID, + AlertID: alertID, + }) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, a) + return c.JSON(http.StatusCreated, a) } // DoAlertUnread godoc @@ -96,20 +99,23 @@ func (h *ApiHandler) DoAlertRead(c echo.Context) error { // @Produce json // @Param alert_id path string true "alert uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Alert +// @Success 200 {object} db.AlertGetRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alerts/{alert_id}/unread [post] // @Security Bearer func (h *ApiHandler) DoAlertUnread(c 
echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertID, err := uuid.Parse(c.Param("alert_id")) if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertService.DoAlertUnread(c.Request().Context(), profileID, alertID) + a, err := h.DBService.AlertReadDelete(c.Request().Context(), db.AlertReadDeleteParams{ + ProfileID: profileID, + AlertID: alertID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/alert_check.go b/api/internal/handler/alert_check.go index 26af1bb8..ffba80a6 100644 --- a/api/internal/handler/alert_check.go +++ b/api/internal/handler/alert_check.go @@ -6,5 +6,5 @@ import ( func (h *AlertCheckHandler) DoAlertChecks() error { ctx := context.Background() - return h.AlertCheckService.DoAlertChecks(ctx) + return h.DBService.DoAlertChecks(ctx, h.Config) } diff --git a/api/internal/handler/alert_config.go b/api/internal/handler/alert_config.go index 6a25335e..c5dc51d0 100644 --- a/api/internal/handler/alert_config.go +++ b/api/internal/handler/alert_config.go @@ -4,41 +4,45 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllAlertConfigsForProject godoc +// ListAlertConfigsForProject godoc // // @Summary lists alert configs for a project // @Tags alert-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs [get] -func (h *ApiHandler) GetAllAlertConfigsForProject(c 
echo.Context) error { +func (h *ApiHandler) ListAlertConfigsForProject(c echo.Context) error { projectID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var aa []model.AlertConfig + var aa []db.VAlertConfig if qp := c.QueryParam("alert_type_id"); qp != "" { alertTypeID, err := uuid.Parse(qp) if err != nil { return httperr.MalformedID(err) } - aa, err = h.AlertConfigService.GetAllAlertConfigsForProjectAndAlertType(c.Request().Context(), projectID, alertTypeID) + aa, err = h.DBService.AlertConfigListForProjectAlertType(c.Request().Context(), db.AlertConfigListForProjectAlertTypeParams{ + ProjectID: projectID, + AlertTypeID: alertTypeID, + }) if err != nil { return httperr.InternalServerError(err) } } else { - aa, err = h.AlertConfigService.GetAllAlertConfigsForProject(c.Request().Context(), projectID) + aa, err = h.DBService.AlertConfigListForProject(c.Request().Context(), projectID) if err != nil { return httperr.InternalServerError(err) } @@ -53,7 +57,7 @@ func (h *ApiHandler) GetAllAlertConfigsForProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,7 +67,7 @@ func (h *ApiHandler) ListInstrumentAlertConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - aa, err := h.AlertConfigService.GetAllAlertConfigsForInstrument(c.Request().Context(), instrumentID) + aa, err := h.DBService.AlertConfigListForInstrument(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -77,7 +81,7 @@ func (h *ApiHandler) ListInstrumentAlertConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project 
uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {object} model.AlertConfig +// @Success 200 {object} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -87,7 +91,7 @@ func (h *ApiHandler) GetAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - a, err := h.AlertConfigService.GetOneAlertConfig(c.Request().Context(), acID) + a, err := h.DBService.AlertConfigGet(c.Request().Context(), acID) if err != nil { return httperr.InternalServerError(err) } @@ -101,16 +105,16 @@ func (h *ApiHandler) GetAlertConfig(c echo.Context) error { // @Accept json // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param alert_config body model.AlertConfig true "alert config payload" +// @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {object} model.AlertConfig +// @Success 200 {object} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs [post] // @Security Bearer func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { - ac := model.AlertConfig{} + var ac dto.AlertConfig if err := c.Bind(&ac); err != nil { return httperr.MalformedBody(err) } @@ -118,10 +122,10 @@ func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(model.Profile) - ac.ProjectID, ac.CreatorID, ac.CreateDate = projectID, profile.ID, time.Now() + profile := c.Get("profile").(db.VProfile) + ac.ProjectID, ac.CreatedBy, ac.CreatedAt = projectID, profile.ID, time.Now() - acNew, err := h.AlertConfigService.CreateAlertConfig(c.Request().Context(), ac) + acNew, err := 
h.DBService.AlertConfigCreate(c.Request().Context(), ac) if err != nil { return httperr.InternalServerError(err) } @@ -136,16 +140,16 @@ func (h *ApiHandler) CreateAlertConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Param alert_config body model.AlertConfig true "alert config payload" +// @Param alert_config body dto.AlertConfig true "alert config payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {object} db.VAlertConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/alert_configs/{alert_config_id} [put] // @Security Bearer func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { - var ac model.AlertConfig + var ac dto.AlertConfig if err := c.Bind(&ac); err != nil { return httperr.MalformedBody(err) } @@ -153,10 +157,10 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - ac.UpdaterID, ac.UpdateDate = &p.ID, &t - aUpdated, err := h.AlertConfigService.UpdateAlertConfig(c.Request().Context(), acID, ac) + ac.UpdatedBy, ac.UpdatedAt = &p.ID, &t + aUpdated, err := h.DBService.AlertConfigUpdate(c.Request().Context(), acID, ac) if err != nil { return httperr.InternalServerError(err) } @@ -171,7 +175,7 @@ func (h *ApiHandler) UpdateAlertConfig(c echo.Context) error { // @Param project_id path string true "Project ID" Format(uuid) // @Param alert_config_id path string true "instrument uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} 
echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -182,7 +186,7 @@ func (h *ApiHandler) DeleteAlertConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.AlertConfigService.DeleteAlertConfig(c.Request().Context(), acID); err != nil { + if err := h.DBService.AlertConfigDelete(c.Request().Context(), acID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/alert_config_test.go b/api/internal/handler/alert_config_test.go index 8600f082..d8a73af3 100644 --- a/api/internal/handler/alert_config_test.go +++ b/api/internal/handler/alert_config_test.go @@ -36,22 +36,22 @@ var alertConfigSchema = fmt.Sprintf(`{ "project_id": { "type": "string" }, "alert_type_id": { "type": "string" }, "alert_type": { "type": "string" }, - "start_date": { "type": "string" }, + "started_at": { "type": "string" }, "schedule_interval": { "type": "string" }, "mute_consecutive_alerts": { "type": "boolean" }, "remind_interval": { "type": ["string", "null"] }, "warning_interval": { "type": ["string", "null"] }, - "last_checked": { "type": ["string", "null"], "format": "date-time" }, - "last_reminded": { "type": ["string", "null"], "format": "date-time" }, + "last_checked_at": { "type": ["string", "null"], "format": "date-time" }, + "last_reminded_at": { "type": ["string", "null"], "format": "date-time" }, "instruments": { "type": "array", "items": %s }, "alert_email_subscriptions": { "type": "array", "items": %s }, "alert_status": { "type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + 
"created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, "additionalProperties": true }`, alertConfigInstrumentSchema, alertConfigEmailSchema) @@ -73,7 +73,7 @@ const createAlertConfigBody = `{ "name": "New Test Alert Config", "body": "New Test Alert Config Description", "alert_type_id": "97e7a25c-d5c7-4ded-b272-1bb6e5914fe3", - "start_date": "2023-05-16T13:19:41.441328Z", + "started_at": "2023-05-16T13:19:41.441328Z", "schedule_interval": "P1D", "mute_consecutive_alerts": true, "warning_interval": "PT1H", @@ -107,7 +107,7 @@ const updateAlertConfigBody = `{ "name": "Updated Test Alert 1", "body": "Updated Alert for demonstration purposes.", "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", - "start_date": "2023-05-16T13:19:41.441328Z", + "started_at": "2023-05-16T13:19:41.441328Z", "schedule_interval": "P3D", "mute_consecutive_alerts": false, "remind_interval": "P1D", diff --git a/api/internal/handler/alert_subscription.go b/api/internal/handler/alert_subscription.go index 4cd13b97..ad98ec81 100644 --- a/api/internal/handler/alert_subscription.go +++ b/api/internal/handler/alert_subscription.go @@ -4,8 +4,9 @@ import ( "errors" "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -20,21 +21,21 @@ import ( // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param alert_config_id path string true "alert config uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.AlertSubscription +// @Success 201 {object} db.AlertProfileSubscription // @Failure 400 {object} echo.HTTPError // 
@Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/subscribe [post] // @Security Bearer func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - pa, err := h.AlertSubscriptionService.SubscribeProfileToAlerts(c.Request().Context(), alertConfigID, profileID) + pa, err := h.DBService.AlertProfileSubscriptionCreateForAlertConfigProfile(c.Request().Context(), alertConfigID, profileID) if err != nil { return httperr.InternalServerError(err) } @@ -57,14 +58,17 @@ func (h *ApiHandler) SubscribeProfileToAlerts(c echo.Context) error { // @Router /projects/{project_id}/instruments/{instrument_id}/alert_configs/{alert_config_id}/unsubscribe [post] // @Security Bearer func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID alertConfigID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - if err = h.AlertSubscriptionService.UnsubscribeProfileToAlerts(c.Request().Context(), alertConfigID, profileID); err != nil { + if err = h.DBService.AlertProfileSubscriptionDelete(c.Request().Context(), db.AlertProfileSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -76,16 +80,16 @@ func (h *ApiHandler) UnsubscribeProfileToAlerts(c echo.Context) error { // @Tags alert-subscription // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.AlertSubscription +// @Success 200 {array} db.AlertProfileSubscription // 
@Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_alert_subscriptions [get] // @Security Bearer func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) profileID := p.ID - ss, err := h.AlertSubscriptionService.ListMyAlertSubscriptions(c.Request().Context(), profileID) + ss, err := h.DBService.AlertSubscriptionListForProfile(c.Request().Context(), profileID) if err != nil { return httperr.InternalServerError(err) } @@ -99,16 +103,16 @@ func (h *ApiHandler) ListMyAlertSubscriptions(c echo.Context) error { // @Accept json // @Produce json // @Param alert_subscription_id path string true "alert subscription id" Format(uuid) -// @Param alert_subscription body model.AlertSubscription true "alert subscription payload" +// @Param alert_subscription body dto.AlertSubscription true "alert subscription payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertSubscription +// @Success 200 {array} db.AlertProfileSubscription // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /alert_subscriptions/{alert_subscription_id} [put] // @Security Bearer func (h *ApiHandler) UpdateMyAlertSubscription(c echo.Context) error { - var s model.AlertSubscription + var s dto.AlertSubscription if err := c.Bind(&s); err != nil { return httperr.MalformedBody(err) } @@ -118,15 +122,15 @@ func (h *ApiHandler) UpdateMyAlertSubscription(c echo.Context) error { } s.ID = sID - p := c.Get("profile").(model.Profile) - t, err := h.AlertSubscriptionService.GetAlertSubscriptionByID(c.Request().Context(), sID) + p := c.Get("profile").(db.VProfile) + t, err := h.DBService.AlertSubscriptionGet(c.Request().Context(), sID) if err != nil { return httperr.InternalServerError(err) } if p.ID != t.ProfileID { return 
httperr.Unauthorized(errors.New("profile id or requester did not match alert subscription id")) } - sUpdated, err := h.AlertSubscriptionService.UpdateMyAlertSubscription(c.Request().Context(), s) + sUpdated, err := h.DBService.AlertProfileSubscriptionUpdateForProfile(c.Request().Context(), s) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/alert_test.go b/api/internal/handler/alert_test.go index c3dc5758..237e0ec7 100644 --- a/api/internal/handler/alert_test.go +++ b/api/internal/handler/alert_test.go @@ -27,9 +27,9 @@ var alertSchema = fmt.Sprintf(`{ "instruments": { "type": "array", "items": %s }, "name": { "type": "string" }, "body": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" } + "created_at": { "type": "string", "format": "date-time" } }, - "required": ["id", "alert_config_id", "project_id", "project_name", "instruments", "name", "body", "create_date"], + "required": ["id", "alert_config_id", "project_id", "project_name", "instruments", "name", "body", "created_at"], "additionalProperties": true }`, alertSubAlertConfigInstrumentSchema) @@ -65,7 +65,7 @@ func TestAlerts(t *testing.T) { Name: "DoAlertRead", URL: fmt.Sprintf("/my_alerts/%s/read", testAlertSubAlertID), Method: http.MethodPost, - ExpectedStatus: http.StatusOK, + ExpectedStatus: http.StatusCreated, ExpectedSchema: objSchema, }, { diff --git a/api/internal/handler/autocomplete.go b/api/internal/handler/autocomplete.go index cd9594ed..28d0cb3c 100644 --- a/api/internal/handler/autocomplete.go +++ b/api/internal/handler/autocomplete.go @@ -1,8 +1,9 @@ package handler import ( + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "net/http" @@ -15,7 +16,7 @@ import ( // @Tags autocomplete // @Produce json // @Param q query string true "search 
query string" -// @Success 200 {array} model.EmailAutocompleteResult +// @Success 200 {array} db.EmailAutocompleteListRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -23,10 +24,13 @@ import ( func (h *ApiHandler) ListEmailAutocomplete(c echo.Context) error { searchText := c.QueryParam("q") if searchText == "" { - return c.JSON(http.StatusOK, make([]model.EmailAutocompleteResult, 0)) + return c.JSON(http.StatusOK, make([]dto.EmailAutocompleteResult, 0)) } - limit := 5 - rr, err := h.EmailAutocompleteService.ListEmailAutocomplete(c.Request().Context(), searchText, limit) + var limit int32 = 5 + rr, err := h.DBService.EmailAutocompleteList(c.Request().Context(), db.EmailAutocompleteListParams{ + SearchKeyword: &searchText, + ResultLimit: limit, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/aware.go b/api/internal/handler/aware.go index 9ea5391c..2bd0205f 100644 --- a/api/internal/handler/aware.go +++ b/api/internal/handler/aware.go @@ -3,8 +3,10 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -13,13 +15,13 @@ import ( // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} model.AwareParameter +// @Success 200 {array} db.AwareParameterListRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /aware/parameters [get] func (h *ApiHandler) ListAwareParameters(c echo.Context) error { - pp, err := h.AwareParameterService.ListAwareParameters(c.Request().Context()) + pp, err := 
h.DBService.AwareParameterList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -31,13 +33,13 @@ func (h *ApiHandler) ListAwareParameters(c echo.Context) error { // @Summary lists alert configs for a project // @Tags aware // @Produce json -// @Success 200 {array} model.AwarePlatformParameterConfig +// @Success 200 {array} service.AwarePlatformParameterConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /aware/data_acquisition_config [get] func (h *ApiHandler) ListAwarePlatformParameterConfig(c echo.Context) error { - cc, err := h.AwareParameterService.ListAwarePlatformParameterConfig(c.Request().Context()) + cc, err := h.DBService.AwarePlatformParameterConfigList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/collection_groups.go b/api/internal/handler/collection_groups.go index adf100ea..ef7781ec 100644 --- a/api/internal/handler/collection_groups.go +++ b/api/internal/handler/collection_groups.go @@ -2,14 +2,15 @@ package handler import ( "net/http" + "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" "github.com/google/uuid" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/labstack/echo/v4" ) @@ -19,7 +20,7 @@ import ( // @Tags collection-groups // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} db.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -29,7 +30,7 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - cc, err := 
h.CollectionGroupService.ListCollectionGroups(c.Request().Context(), pID) + cc, err := h.DBService.CollectionGroupListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -43,13 +44,13 @@ func (h *ApiHandler) ListCollectionGroups(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" Format(uuid) -// @Success 200 {object} model.CollectionGroupDetails +// @Success 200 {object} db.VCollectionGroupDetails // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/collection_groups/{collection_group_id} [get] func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { - pID, err := uuid.Parse(c.Param("project_id")) + _, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } @@ -57,7 +58,7 @@ func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - d, err := h.CollectionGroupService.GetCollectionGroupDetails(c.Request().Context(), pID, cgID) + d, err := h.DBService.CollectionGroupDetailsGet(c.Request().Context(), cgID) if err != nil { return httperr.InternalServerError(err) } @@ -71,16 +72,16 @@ func (h *ApiHandler) GetCollectionGroupDetails(c echo.Context) error { // @Tags collection-groups // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param collection_group body model.CollectionGroup true "collection group payload" +// @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {array} model.CollectionGroup +// @Success 200 {array} db.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // 
@Router /projects/{project_id}/collection_groups [post] // @Security Bearer func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { - var cg model.CollectionGroup + var cg dto.CollectionGroup // Bind Information Provided if err := c.Bind(&cg); err != nil { return httperr.MalformedBody(err) @@ -91,14 +92,14 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { return httperr.MalformedID(err) } cg.ProjectID = pID - p := c.Get("profile").(model.Profile) - cg.CreatorID, cg.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + cg.CreatedBy, cg.CreatedAt = p.ID, time.Now() - cgNew, err := h.CollectionGroupService.CreateCollectionGroup(c.Request().Context(), cg) + cgNew, err := h.DBService.CollectionGroupCreate(c.Request().Context(), cg) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, []model.CollectionGroup{cgNew}) + return c.JSON(http.StatusCreated, []db.CollectionGroup{cgNew}) } // UpdateCollectionGroup godoc @@ -108,16 +109,16 @@ func (h *ApiHandler) CreateCollectionGroup(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param collection_group_id path string true "collection group uuid" -// @Param collection_group body model.CollectionGroup true "collection group payload" +// @Param collection_group body dto.CollectionGroup true "collection group payload" // @Param key query string false "api key" -// @Success 200 {object} model.CollectionGroup +// @Success 200 {object} db.CollectionGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/collection_groups/{collection_group_id} [put] // @Security Bearer func (h *ApiHandler) UpdateCollectionGroup(c echo.Context) error { - var cg model.CollectionGroup + var cg dto.CollectionGroup if err := c.Bind(&cg); err != nil { return httperr.MalformedBody(err) } @@ -134,14 +135,14 @@ 
func (h *ApiHandler) UpdateCollectionGroup(c echo.Context) error { } cg.ID = cgID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - cg.UpdaterID, cg.UpdateDate = &p.ID, &t - cgUpdated, err := h.CollectionGroupService.UpdateCollectionGroup(c.Request().Context(), cg) + cg.UpdatedBy, cg.UpdatedAt = &p.ID, &t + cgUpdated, err := h.DBService.CollectionGroupUpdate(c.Request().Context(), cg) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, cgUpdated) + return c.JSON(http.StatusOK, cgUpdated) } // DeleteCollectionGroup godoc @@ -167,7 +168,10 @@ func (h *ApiHandler) DeleteCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.DeleteCollectionGroup(c.Request().Context(), pID, cgID); err != nil { + if err := h.DBService.CollectionGroupDelete(c.Request().Context(), db.CollectionGroupDeleteParams{ + ProjectID: pID, + ID: cgID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -182,7 +186,7 @@ func (h *ApiHandler) DeleteCollectionGroup(c echo.Context) error { // @Param collection_group_id path string true "collection group uuid" Format(uuid) // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -197,7 +201,65 @@ func (h *ApiHandler) AddTimeseriesToCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.AddTimeseriesToCollectionGroup(c.Request().Context(), cgID, tsID); err != nil { + var sortOrder int32 + soParam := c.QueryParam("sort_order") + if soParam != "" { + so64, err := 
strconv.ParseInt(soParam, 10, 32) + if err != nil { + return httperr.BadRequest(err) + } + sortOrder = int32(so64) + } + + if err := h.DBService.CollectionGroupTimeseriesCreate(c.Request().Context(), db.CollectionGroupTimeseriesCreateParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + SortOrder: sortOrder, + }); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, make(map[string]interface{})) +} + +// UpdateTimeseriesCollectionGroupSortOrder godoc +// +// @Summary updates sort order for collection group timeseries +// @Tags collection-groups +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param collection_group_id path string true "collection group uuid" Format(uuid) +// @Param timeseries_id path string true "timeseries uuid" Format(uuid) +// @Param key query string false "api key" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/collection_groups/{collection_group_id}/timeseries/{timeseries_id} [put] +// @Security Bearer +func (h *ApiHandler) UpdateTimeseriesCollectionGroupSortOrder(c echo.Context) error { + cgID, err := uuid.Parse(c.Param("collection_group_id")) + if err != nil { + return httperr.MalformedID(err) + } + tsID, err := uuid.Parse(c.Param("timeseries_id")) + if err != nil { + return httperr.MalformedID(err) + } + var sortOrder int32 + soParam := c.QueryParam("sort_order") + if soParam != "" { + so64, err := strconv.ParseInt(soParam, 10, 32) + if err != nil { + return httperr.BadRequest(err) + } + sortOrder = int32(so64) + } + + if err := h.DBService.CollectionGroupTimeseriesUpdateSortOrder(c.Request().Context(), db.CollectionGroupTimeseriesUpdateSortOrderParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + SortOrder: sortOrder, + }); err != nil { return httperr.InternalServerError(err) } return
c.JSON(http.StatusOK, make(map[string]interface{})) @@ -227,7 +289,10 @@ func (h *ApiHandler) RemoveTimeseriesFromCollectionGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CollectionGroupService.RemoveTimeseriesFromCollectionGroup(c.Request().Context(), cgID, tsID); err != nil { + if err := h.DBService.CollectionGroupTimeseriesDelete(c.Request().Context(), db.CollectionGroupTimeseriesDeleteParams{ + CollectionGroupID: cgID, + TimeseriesID: tsID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/collection_groups_test.go b/api/internal/handler/collection_groups_test.go index 4b15fe50..62ececc1 100644 --- a/api/internal/handler/collection_groups_test.go +++ b/api/internal/handler/collection_groups_test.go @@ -16,15 +16,18 @@ const collectionGroupSchema = `{ "project_id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, + "sort_order": { "type": "integer" } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date"], + "required": ["id", "project_id", "name", "slug", "created_by", "created_at", "updated_by", "updated_at", "sort_order"], "additionalProperties": false }` +var collectionGroupObjectLoader = gojsonschema.NewStringLoader(collectionGroupSchema) + var collectionGroupArrayLoader = gojsonschema.NewStringLoader(fmt.Sprintf(`{ "type": "array", "items": %s @@ -37,10 +40,11 @@ const 
collectionGroupDetailsSchema = `{ "project_id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, + "sort_order": { "type": "integer" }, "timeseries": { "type": "array", "items": { @@ -60,14 +64,15 @@ const collectionGroupDetailsSchema = `{ "latest_time": {"type": "string", "format": "date-time" }, "latest_value": {"type": "number" }, "is_computed": { "type": "boolean" }, + "sort_order": { "type": "integer" }, "type": { "type": "string" } }, - "required": ["id", "slug", "name", "variable", "instrument_id", "instrument", "instrument_slug", "parameter_id", "parameter", "unit_id", "unit", "latest_time", "latest_value", "is_computed", "type"], + "required": ["id", "slug", "name", "variable", "instrument_id", "instrument", "instrument_slug", "parameter_id", "parameter", "unit_id", "unit", "latest_time", "latest_value", "is_computed", "type", "sort_order"], "additionalProperties": false } } }, - "required": ["id", "project_id", "name", "slug", "creator_id", "create_date", "updater_id", "update_date", "timeseries"], + "required": ["id", "project_id", "name", "slug", "created_by", "created_at", "updated_by", "updated_at", "timeseries", "sort_order"], "additionalProperties": false }` @@ -75,12 +80,27 @@ var collectionGroupDetailsObjectLoader = gojsonschema.NewStringLoader(collection const testCollectionGroupID = "30b32cb1-0936-42c4-95d1-63a7832a57db" +var createCollectionGroupBody = `{ + "name": "test new collection group", + "sort_order": 2 +}` + +const updateCollectionGroupBody = `{ + "name": "test update 
collection group", + "sort_order": 3 +}` + func TestCollectionGroups(t *testing.T) { - objSchema, err := gojsonschema.NewSchema(collectionGroupDetailsObjectLoader) + objSchema, err := gojsonschema.NewSchema(collectionGroupObjectLoader) assert.Nil(t, err) if err != nil { t.Log("invalid object schema") } + detailsObjSchema, err := gojsonschema.NewSchema(collectionGroupDetailsObjectLoader) + assert.Nil(t, err) + if err != nil { + t.Log("invalid details object schema") + } arrSchema, err := gojsonschema.NewSchema(collectionGroupArrayLoader) assert.Nil(t, err) if err != nil { @@ -93,7 +113,7 @@ func TestCollectionGroups(t *testing.T) { URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, - ExpectedSchema: objSchema, + ExpectedSchema: detailsObjSchema, }, { Name: "ListCollectionGroups", @@ -102,6 +122,22 @@ func TestCollectionGroups(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: arrSchema, }, + { + Name: "CreateCollectionGroup", + URL: fmt.Sprintf("/projects/%s/collection_groups", testProjectID), + Method: http.MethodPost, + ExpectedStatus: http.StatusCreated, + ExpectedSchema: arrSchema, + Body: createCollectionGroupBody, + }, + { + Name: "UpdateCollectionGroup", + URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), + Method: http.MethodPut, + ExpectedStatus: http.StatusOK, + ExpectedSchema: objSchema, + Body: updateCollectionGroupBody, + }, { Name: "DeleteCollectionGroup", URL: fmt.Sprintf("/projects/%s/collection_groups/%s", testProjectID, testCollectionGroupID), diff --git a/api/internal/handler/datalogger.go b/api/internal/handler/datalogger.go index 16f62707..ed30ec99 100644 --- a/api/internal/handler/datalogger.go +++ b/api/internal/handler/datalogger.go @@ -6,8 +6,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" 
"github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,7 +20,7 @@ import ( // @Tags datalogger // @Produce json // @Param key query string false "api key" -// @Success 200 {array} model.Datalogger +// @Success 200 {array} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -32,7 +34,7 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { return httperr.MalformedID(err) } - dls, err := h.DataloggerService.ListProjectDataloggers(c.Request().Context(), pID) + dls, err := h.DBService.DataloggerListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -40,7 +42,8 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { return c.JSON(http.StatusOK, dls) } - dls, err := h.DataloggerService.ListAllDataloggers(c.Request().Context()) + // TODO: do we actually need this? 
If so it should probably be paginated + dls, err := h.DBService.DataloggerList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -54,9 +57,9 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { // @Tags datalogger // @Accept json // @Produce json -// @Param datalogger body model.Datalogger true "datalogger payload" +// @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {array} model.DataloggerWithKey +// @Success 200 {object} service.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,25 +67,28 @@ func (h *ApiHandler) ListDataloggers(c echo.Context) error { // @Security Bearer func (h *ApiHandler) CreateDatalogger(c echo.Context) error { ctx := c.Request().Context() - n := model.Datalogger{} + n := dto.Datalogger{} if err := c.Bind(&n); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) - n.CreatorID = p.ID + p := c.Get("profile").(db.VProfile) + n.CreatedBy = p.ID if n.Name == "" { return httperr.BadRequest(errors.New("valid `name` field required")) } - model, err := h.DataloggerService.GetDataloggerModelName(ctx, n.ModelID) + model, err := h.DBService.DataloggerGetModelName(ctx, n.ModelID) if err != nil { return httperr.BadRequest(fmt.Errorf("data logger model id %s not found", n.ModelID)) } // check if datalogger with model and sn already exists and is not deleted - exists, err := h.DataloggerService.GetDataloggerIsActive(ctx, model, n.SN) + exists, err := h.DBService.DataloggerGetActive(ctx, db.DataloggerGetActiveParams{ + Model: model, + Sn: n.SN, + }) if err != nil { return httperr.InternalServerError(err) } @@ -91,7 +97,7 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { return httperr.BadRequest(errors.New("active data logger model with this model and serial number already exist")) } - dl, err := 
h.DataloggerService.CreateDatalogger(ctx, n) + dl, err := h.DBService.DataloggerCreate(ctx, n) if err != nil { return httperr.InternalServerError(err) } @@ -106,7 +112,7 @@ func (h *ApiHandler) CreateDatalogger(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerWithKey +// @Success 200 {object} service.DataloggerWithKey // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -119,17 +125,21 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { return httperr.MalformedID(err) } - u := model.Datalogger{ID: dlID} + u := dto.Datalogger{ID: dlID} - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.NotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - u.UpdaterID, u.UpdateDate = &profile.ID, &t + u.UpdatedBy, u.UpdatedAt = &profile.ID, &t - dl, err := h.DataloggerService.CycleDataloggerKey(ctx, u) + dl, err := h.DBService.DataloggerHashUpdate(ctx, u) if err != nil { return httperr.InternalServerError(err) } @@ -144,7 +154,7 @@ func (h *ApiHandler) CycleDataloggerKey(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.Datalogger +// @Success 200 {object} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -155,7 +165,7 @@ func (h *ApiHandler) GetDatalogger(c echo.Context) error { if err != nil { return 
httperr.MalformedID(err) } - dl, err := h.DataloggerService.GetOneDatalogger(c.Request().Context(), dlID) + dl, err := h.DBService.DataloggerGet(c.Request().Context(), dlID) if err != nil { httperr.ServerErrorOrNotFound(err) } @@ -169,9 +179,9 @@ func (h *ApiHandler) GetDatalogger(c echo.Context) error { // @Tags datalogger // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) -// @Param datalogger body model.Datalogger true "datalogger payload" +// @Param datalogger body dto.Datalogger true "datalogger payload" // @Param key query string false "api key" -// @Success 200 {object} model.Datalogger +// @Success 200 {object} db.VDatalogger // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -184,21 +194,25 @@ func (h *ApiHandler) UpdateDatalogger(c echo.Context) error { return httperr.MalformedID(err) } - u := model.Datalogger{ID: dlID} + u := dto.Datalogger{ID: dlID} if err := c.Bind(&u); err != nil { return httperr.MalformedBody(err) } u.ID = dlID - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { return httperr.InternalServerError(err) } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) + } - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - u.UpdaterID, u.UpdateDate = &profile.ID, &t + u.UpdatedBy, u.UpdatedAt = &profile.ID, &t - dlUpdated, err := h.DataloggerService.UpdateDatalogger(ctx, u) + dlUpdated, err := h.DBService.DataloggerUpdate(ctx, u) if err != nil { return httperr.InternalServerError(err) } @@ -226,16 +240,21 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != 
nil { return httperr.InternalServerError(err) } + if !exists { + return httperr.NotFound(errors.New("datalogger does not exist")) + } + profile := c.Get("profile").(db.VProfile) - d := model.Datalogger{ID: dlID} - profile := c.Get("profile").(model.Profile) t := time.Now() - d.UpdaterID, d.UpdateDate = &profile.ID, &t - - if err := h.DataloggerService.DeleteDatalogger(ctx, d); err != nil { + if err := h.DBService.DataloggerDelete(ctx, db.DataloggerDeleteParams{ + ID: dlID, + UpdatedBy: &profile.ID, + UpdatedAt: &t, + }); err != nil { return httperr.InternalServerError(err) } @@ -250,7 +269,7 @@ func (h *ApiHandler) DeleteDatalogger(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerTablePreview +// @Success 200 {object} db.VDataloggerPreview // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -265,7 +284,7 @@ func (h *ApiHandler) GetDataloggerTablePreview(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - preview, err := h.DataloggerService.GetDataloggerTablePreview(c.Request().Context(), dataloggerTableID) + preview, err := h.DBService.DataloggerTablePreviewGet(c.Request().Context(), dataloggerTableID) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -280,7 +299,7 @@ func (h *ApiHandler) GetDataloggerTablePreview(c echo.Context) error { // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.DataloggerTablePreview +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} 
echo.HTTPError @@ -295,7 +314,7 @@ func (h *ApiHandler) ResetDataloggerTableName(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.DataloggerService.ResetDataloggerTableName(c.Request().Context(), dataloggerTableID); err != nil { + if err := h.DBService.DataloggerUpdateTableNameBlank(c.Request().Context(), dataloggerTableID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"datalogger_table_id": dataloggerTableID}) diff --git a/api/internal/handler/datalogger_telemetry.go b/api/internal/handler/datalogger_telemetry.go index a32a35f4..22e6df4f 100644 --- a/api/internal/handler/datalogger_telemetry.go +++ b/api/internal/handler/datalogger_telemetry.go @@ -9,8 +9,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -33,7 +34,10 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) ctx := c.Request().Context() // Make sure datalogger is active - dl, err := h.DataloggerTelemetryService.GetDataloggerByModelSN(ctx, modelName, sn) + dl, err := h.DBService.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &modelName, + Sn: sn, + }) if err != nil { return httperr.InternalServerError(err) } @@ -48,13 +52,11 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) return httperr.MalformedBody(err) } - var prv model.DataloggerTablePreview - if err := prv.Preview.Set(rawJSON); err != nil { - return httperr.InternalServerError(err) - } - prv.UpdateDate = time.Now() + var prv dto.DataloggerTablePreview + prv.Preview = rawJSON + prv.UpdatedAt = time.Now() - if _, err := h.DataloggerTelemetryService.UpdateDataloggerTablePreview(ctx, dl.ID, preparse, prv); err != nil { 
+ if _, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, preparse, prv); err != nil { return httperr.InternalServerError(err) } @@ -72,7 +74,7 @@ func (h *TelemetryHandler) CreateOrUpdateDataloggerMeasurements(c echo.Context) // CSIJSON Output Format: https://help.campbellsci.com/crbasic/cr350/#parameters/mqtt_outputformat.htm?Highlight=CSIJSON // // HTTPPost: https://help.campbellsci.com/crbasic/cr350/#Instructions/httppost.htm?Highlight=httppost -func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) echo.HandlerFunc { +func getCR6Handler(h *TelemetryHandler, dl db.VDatalogger, rawJSON []byte) echo.HandlerFunc { return func(c echo.Context) error { // Errors are cellected and sent to datalogger preview for debugging since datalogger clients cannot parse responses em := make([]string, 0) @@ -83,21 +85,21 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech // to collect logs to be previewed in the core web application. The error code returned to the client datalogger // will sill be relavent to the arm of control flow that raised it. 
defer func() { - if err := h.DataloggerTelemetryService.UpdateDataloggerTableError(ctx, dl.ID, &tn, &model.DataloggerError{Errors: em}); err != nil { + if err := h.DBService.DataloggerTableErrorUpdate(ctx, dl.ID, &tn, &dto.DataloggerError{Errors: em}); err != nil { log.Printf(err.Error()) } }() // Upload Datalogger Measurements - var pl model.DataloggerPayload + var pl dto.DataloggerPayload if err := json.Unmarshal(rawJSON, &pl); err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusBadRequest, err.Error())) return httperr.MalformedBody(err) } // Check sn from route param matches sn in request body - if dl.SN != pl.Head.Environment.SerialNo { - snErr := fmt.Sprint(snErrMsg, dl.SN) + if dl.Sn != pl.Head.Environment.SerialNo { + snErr := fmt.Sprint(snErrMsg, dl.Sn) em = append(em, fmt.Sprintf("%d: %s", http.StatusBadRequest, snErr)) return httperr.BadRequest(errors.New(snErr)) } @@ -111,34 +113,32 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech // reroute deferred errors and previews to respective table tn = pl.Head.Environment.TableName - var prv model.DataloggerTablePreview - if err := prv.Preview.Set(rawJSON); err != nil { - return httperr.MalformedBody(err) - } - prv.UpdateDate = time.Now() + var prv dto.DataloggerTablePreview + prv.Preview = rawJSON + prv.UpdatedAt = time.Now() - tableID, err := h.DataloggerTelemetryService.UpdateDataloggerTablePreview(ctx, dl.ID, tn, prv) + tableID, err := h.DBService.DataloggerTablePreviewUpdate(ctx, dl.ID, tn, prv) if err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, err.Error())) return httperr.InternalServerError(err) } - eqt, err := h.EquivalencyTableService.GetEquivalencyTable(ctx, tableID) + eqt, err := h.DBService.EquivalencyTableGet(ctx, tableID) if err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, err.Error())) return httperr.InternalServerError(err) } - eqtFields := make(map[string]model.EquivalencyTableRow) 
- for _, r := range eqt.Rows { - eqtFields[r.FieldName] = model.EquivalencyTableRow{ + eqtFields := make(map[string]dto.EquivalencyTableRow) + for _, r := range eqt.Fields { + eqtFields[r.FieldName] = dto.EquivalencyTableRow{ TimeseriesID: r.TimeseriesID, InstrumentID: r.InstrumentID, } } fields := pl.Head.Fields - mcs := make([]model.MeasurementCollection, len(fields)) + mcs := make([]dto.MeasurementCollection, len(fields)) // Error if there is no field name in equivalency table to map the field name in the raw payload to // delete the keys that were used, check for any dangling afterwards @@ -161,7 +161,7 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech } // collect measurements - items := make([]model.Measurement, len(pl.Data)) + items := make([]dto.Measurement, len(pl.Data)) for j, d := range pl.Data { // To avoid complications of daylight savings and related issues, // all incoming datalogger timestamps are expected to be in UTC @@ -178,10 +178,10 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech delete(eqtFields, f.Name) continue } - items[j] = model.Measurement{TimeseriesID: *row.TimeseriesID, Time: t, Value: model.FloatNanInf(v)} + items[j] = dto.Measurement{TimeseriesID: *row.TimeseriesID, Time: t, Value: dto.FloatNanInf(v)} } - mcs[i] = model.MeasurementCollection{TimeseriesID: *row.TimeseriesID, Items: items} + mcs[i] = dto.MeasurementCollection{TimeseriesID: *row.TimeseriesID, Items: items} delete(eqtFields, f.Name) } @@ -191,11 +191,11 @@ func getCR6Handler(h *TelemetryHandler, dl model.Datalogger, rawJSON []byte) ech em = append(em, fmt.Sprintf("field '%s' in equivalency table does not match any fields from datalogger", eqtName)) } - if _, err = h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(ctx, mcs); err != nil { + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mcs); err != nil { em = append(em, fmt.Sprintf("%d: %s", http.StatusInternalServerError, 
err.Error())) return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"model": *dl.Model, "sn": dl.SN}) + return c.JSON(http.StatusOK, map[string]interface{}{"model": *dl.Model, "sn": dl.Sn}) } } diff --git a/api/internal/handler/datalogger_test.go b/api/internal/handler/datalogger_test.go index 10160b11..de4fb707 100644 --- a/api/internal/handler/datalogger_test.go +++ b/api/internal/handler/datalogger_test.go @@ -24,12 +24,12 @@ var dataloggerSchema = fmt.Sprintf(`{ "name": { "type": "string" }, "sn": { "type": "string" }, "project_id": { "type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": "string" }, - "update_date": { "type": ["string", "null"] }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": "string" }, + "updated_at": { "type": ["string", "null"] }, "slug": { "type": "string" }, "model_id": { "type": "string" }, "model": { "type": "string" }, @@ -42,9 +42,9 @@ var dataloggerSchema = fmt.Sprintf(`{ "name", "sn", "project_id", - "creator_id", - "creator_username", - "create_date", + "created_by", + "created_by_username", + "created_at", "slug", "model_id", "model", @@ -63,7 +63,7 @@ const dataloggerPreviewSchema = `{ "type": "object", "properties": { "datalogger_table_id": { "type": "string" }, - "update_date": { "type": "string" }, + "updated_at": { "type": "string" }, "preview": { "type": ["object", "array", "null"] } } }` diff --git a/api/internal/handler/district_rollup.go b/api/internal/handler/district_rollup.go index 900a313b..7c4ea11e 100644 --- a/api/internal/handler/district_rollup.go +++ b/api/internal/handler/district_rollup.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + 
"github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,18 +19,18 @@ const timeRangeErrMessage = "maximum requested time range exceeded (5 years)" // @Tags district-rollup // @Produce json // @Param project_id path string true "project id" Format(uuid) -// @Success 200 {array} model.DistrictRollup +// @Success 200 {array} db.VDistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/district_rollup/evaluation_submittals [get] func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow from, to := c.QueryParam("from_timestamp_month"), c.QueryParam("to_timestamp_month") if err := tw.SetWindow(from, to, time.Now().AddDate(-1, 0, 0), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -38,7 +39,11 @@ func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { return httperr.Message(http.StatusBadRequest, timeRangeErrMessage) } - project, err := h.DistrictRollupService.ListEvaluationDistrictRollup(c.Request().Context(), id, tw) + project, err := h.DBService.DistrictRollupListEvaluationForProjectAlertConfig(c.Request().Context(), db.DistrictRollupListEvaluationForProjectAlertConfigParams{ + ProjectID: pID, + StartMonthTime: tw.After, + EndMonthTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -51,18 +56,18 @@ func (h *ApiHandler) ListProjectEvaluationDistrictRollup(c echo.Context) error { // @Tags district-rollup // @Produce json // @Param project_id path 
string true "project id" Format(uuid) -// @Success 200 {array} model.DistrictRollup +// @Success 200 {array} db.VDistrictRollup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/district_rollup/measurement_submittals [get] func (h *ApiHandler) ListProjectMeasurementDistrictRollup(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow from, to := c.QueryParam("from_timestamp_month"), c.QueryParam("to_timestamp_month") if err := tw.SetWindow(from, to, time.Now().AddDate(-1, 0, 0), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -71,7 +76,11 @@ func (h *ApiHandler) ListProjectMeasurementDistrictRollup(c echo.Context) error return httperr.Message(http.StatusBadRequest, timeRangeErrMessage) } - project, err := h.DistrictRollupService.ListMeasurementDistrictRollup(c.Request().Context(), id, tw) + project, err := h.DBService.DistrictRollupListMeasurementForProjectAlertConfig(c.Request().Context(), db.DistrictRollupListMeasurementForProjectAlertConfigParams{ + ProjectID: pID, + StartMonthTime: tw.After, + EndMonthTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/domain.go b/api/internal/handler/domain.go index d7fe4831..ed2977a1 100644 --- a/api/internal/handler/domain.go +++ b/api/internal/handler/domain.go @@ -3,23 +3,24 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) -// GetDomains godoc +// ListDomains godoc // // @Summary lists all domains // @Tags domain // 
@Produce json -// @Success 200 {array} model.Domain +// @Success 200 {array} db.VDomain // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains [get] -func (h *ApiHandler) GetDomains(c echo.Context) error { - dd, err := h.DomainService.GetDomains(c.Request().Context()) +func (h *ApiHandler) ListDomains(c echo.Context) error { + dd, err := h.DBService.DomainList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -31,15 +32,33 @@ func (h *ApiHandler) GetDomains(c echo.Context) error { // @Summary Get map with domain group as key // @Tags domain // @Produce json -// @Success 200 {object} model.DomainMap +// @Success 200 {object} service.DomainMap // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains/map [get] func (h *ApiHandler) GetDomainMap(c echo.Context) error { - dm, err := h.DomainService.GetDomainMap(c.Request().Context()) + dm, err := h.DBService.DomainMapGet(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, dm) } + +// ListTimezoneOptions godoc +// +// @Summary lists time zone options +// @Tags domain +// @Produce json +// @Success 200 {array} db.PgTimezoneNamesListRow +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /domains [get] +func (h *ApiHandler) ListTimezoneOptions(c echo.Context) error { + dd, err := h.DBService.PgTimezoneNamesList(c.Request().Context()) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, dd) +} diff --git a/api/internal/handler/equivalency_table.go b/api/internal/handler/equivalency_table.go index a0df6b0b..8e03259b 100644 --- a/api/internal/handler/equivalency_table.go +++ b/api/internal/handler/equivalency_table.go @@ -1,11 +1,13 @@ package 
handler import ( + "errors" "fmt" "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -18,7 +20,7 @@ import ( // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.EquivalencyTable +// @Success 200 {array} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -37,11 +39,15 @@ func (h *ApiHandler) GetEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - t, err := h.EquivalencyTableService.GetEquivalencyTable(ctx, dataloggerTableID) + t, err := h.DBService.EquivalencyTableGet(ctx, dataloggerTableID) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -56,9 +62,9 @@ func (h *ApiHandler) GetEquivalencyTable(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) -// @Param equivalency_table body model.EquivalencyTable true "equivalency table payload" +// @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} model.EquivalencyTable +// @Success 200 
{object} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,7 +77,7 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { return httperr.MalformedID(err) } - t := model.EquivalencyTable{DataloggerID: dlID} + t := dto.EquivalencyTable{DataloggerID: dlID} if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } @@ -90,7 +96,7 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { if t.DataloggerTableName == "" { return httperr.Message(http.StatusBadRequest, "payload must contain datalogger_table_name field") } - dataloggerTableID, err = h.DataloggerService.GetOrCreateDataloggerTable(ctx, dlID, t.DataloggerTableName) + dataloggerTableID, err = h.DBService.DataloggerTableGetOrCreate(ctx, dlID, t.DataloggerTableName) if err != nil { - httperr.InternalServerError(err) + return httperr.InternalServerError(err) } @@ -99,15 +105,23 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { t.DataloggerID = dlID t.DataloggerTableID = dataloggerTableID - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.EquivalencyTableService.GetIsValidDataloggerTable(ctx, dataloggerTableID); err != nil { + valid, err := h.DBService.DataloggerTableGetIsValid(ctx, dataloggerTableID) + if err != nil { + return httperr.InternalServerError(err) + } + if !valid { return httperr.Message(http.StatusBadRequest, fmt.Sprintf("invalid datalogger table %s %s", t.DataloggerID, t.DataloggerTableName)) } - eqt, err := h.EquivalencyTableService.CreateOrUpdateEquivalencyTable(ctx, t) + eqt, err := h.DBService.EquivalencyTableCreateOrUpdate(ctx, t) if err != nil { return 
httperr.InternalServerError(err) } @@ -122,9 +136,9 @@ func (h *ApiHandler) CreateEquivalencyTable(c echo.Context) error { // @Produce json // @Param datalogger_id path string true "datalogger uuid" Format(uuid) // @Param datalogger_table_id path string true "datalogger table uuid" Format(uuid) -// @Param equivalency_table body model.EquivalencyTable true "equivalency table payload" +// @Param equivalency_table body dto.EquivalencyTable true "equivalency table payload" // @Param key query string false "api key" -// @Success 200 {object} model.EquivalencyTable +// @Success 200 {object} db.VDataloggerEquivalencyTable // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -141,7 +155,7 @@ func (h *ApiHandler) UpdateEquivalencyTable(c echo.Context) error { return httperr.MalformedID(err) } - t := model.EquivalencyTable{DataloggerID: dlID, DataloggerTableID: dataloggerTableID} + t := dto.EquivalencyTable{DataloggerID: dlID, DataloggerTableID: dataloggerTableID} if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } @@ -151,11 +165,15 @@ func (h *ApiHandler) UpdateEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - eqtUpdated, err := h.EquivalencyTableService.UpdateEquivalencyTable(ctx, t) + eqtUpdated, err := h.DBService.EquivalencyTableUpdate(ctx, t) if err != nil { return httperr.InternalServerError(err) } @@ -190,11 +208,15 @@ func (h *ApiHandler) DeleteEquivalencyTable(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return 
httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.DataloggerService.DeleteDataloggerTable(ctx, dataloggerTableID); err != nil { + if err := h.DBService.DataloggerTableDelete(ctx, dataloggerTableID); err != nil { return httperr.InternalServerError(err) } @@ -232,11 +254,15 @@ func (h *ApiHandler) DeleteEquivalencyTableRow(c echo.Context) error { ctx := c.Request().Context() - if err := h.DataloggerService.VerifyDataloggerExists(ctx, dlID); err != nil { - return httperr.ServerErrorOrNotFound(err) + exists, err := h.DBService.DataloggerGetExists(ctx, dlID) + if err != nil { + return httperr.InternalServerError(err) + } + if !exists { + return httperr.NotFound(errors.New("requested datalogger does not exist")) } - if err := h.EquivalencyTableService.DeleteEquivalencyTableRow(ctx, rowID); err != nil { + if err := h.DBService.EquivalencyTableDelete(ctx, rowID); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/evaluation.go b/api/internal/handler/evaluation.go index 09c219d5..374e6b2b 100644 --- a/api/internal/handler/evaluation.go +++ b/api/internal/handler/evaluation.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,7 +18,7 @@ import ( // @Tags evaluation // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Evaluation +// @Success 200 {array} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} 
echo.HTTPError @@ -28,18 +29,21 @@ func (h *ApiHandler) ListProjectEvaluations(c echo.Context) error { return httperr.MalformedID(err) } ctx := c.Request().Context() - var ee []model.Evaluation + var ee []db.VEvaluation if qp := c.QueryParam("alert_config_id"); qp != "" { alertConfigID, err := uuid.Parse(qp) if err != nil { return httperr.MalformedID(err) } - ee, err = h.EvaluationService.ListProjectEvaluationsByAlertConfig(ctx, projectID, alertConfigID) + ee, err = h.DBService.EvaluationListForProjectAlertConfig(ctx, db.EvaluationListForProjectAlertConfigParams{ + ProjectID: projectID, + AlertConfigID: &alertConfigID, + }) if err != nil { return httperr.InternalServerError(err) } } else { - ee, err = h.EvaluationService.ListProjectEvaluations(ctx, projectID) + ee, err = h.DBService.EvaluationListForProject(ctx, projectID) if err != nil { return httperr.InternalServerError(err) } @@ -54,7 +58,7 @@ func (h *ApiHandler) ListProjectEvaluations(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Evaluation +// @Success 200 {array} dto.Evaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -64,7 +68,7 @@ func (h *ApiHandler) ListInstrumentEvaluations(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - ee, err := h.EvaluationService.ListInstrumentEvaluations(c.Request().Context(), instrumentID) + ee, err := h.DBService.EvaluationListForInstrument(c.Request().Context(), &instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -78,7 +82,7 @@ func (h *ApiHandler) ListInstrumentEvaluations(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) -// @Success 200 {object} 
model.Evaluation +// @Success 200 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -88,7 +92,7 @@ func (h *ApiHandler) GetEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - ev, err := h.EvaluationService.GetEvaluation(c.Request().Context(), acID) + ev, err := h.DBService.EvaluationGet(c.Request().Context(), acID) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -101,16 +105,16 @@ func (h *ApiHandler) GetEvaluation(c echo.Context) error { // @Tags evaluation // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param evaluation body model.Evaluation true "evaluation payload" +// @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} model.Evaluation +// @Success 201 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/evaluations [post] // @Security Bearer func (h *ApiHandler) CreateEvaluation(c echo.Context) error { - ev := model.Evaluation{} + ev := dto.Evaluation{} if err := c.Bind(&ev); err != nil { return httperr.MalformedBody(err) } @@ -118,10 +122,10 @@ func (h *ApiHandler) CreateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - profile := c.Get("profile").(model.Profile) - ev.ProjectID, ev.CreatorID, ev.CreateDate = projectID, profile.ID, time.Now() + profile := c.Get("profile").(db.VProfile) + ev.ProjectID, ev.CreatedBy, ev.CreatedAt = projectID, profile.ID, time.Now() - evNew, err := h.EvaluationService.CreateEvaluation(c.Request().Context(), ev) + evNew, err := h.DBService.EvaluationCreate(c.Request().Context(), ev) if err != nil { return httperr.InternalServerError(err) } @@ -135,16 +139,16 @@ func (h *ApiHandler) CreateEvaluation(c 
echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) -// @Param evaluation body model.Evaluation true "evaluation payload" +// @Param evaluation body dto.Evaluation true "evaluation payload" // @Param key query string false "api key" -// @Success 200 {object} model.Evaluation +// @Success 200 {object} db.VEvaluation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/evaluations/{evaluation_id} [put] // @Security Bearer func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { - var ev model.Evaluation + var ev dto.Evaluation if err := c.Bind(&ev); err != nil { return httperr.MalformedBody(err) } @@ -152,10 +156,10 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - ev.UpdaterID, ev.UpdateDate = &p.ID, &t - evUpdated, err := h.EvaluationService.UpdateEvaluation(c.Request().Context(), evID, ev) + ev.UpdatedBy, ev.UpdatedAt = &p.ID, &t + evUpdated, err := h.DBService.EvaluationUpdate(c.Request().Context(), evID, ev) if err != nil { return httperr.InternalServerError(err) } @@ -170,7 +174,7 @@ func (h *ApiHandler) UpdateEvaluation(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param evaluation_id path string true "evaluation uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -181,7 +185,7 @@ func (h *ApiHandler) DeleteEvaluation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := 
h.EvaluationService.DeleteEvaluation(c.Request().Context(), acID); err != nil { + if err := h.DBService.EvaluationDelete(c.Request().Context(), acID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/evaluation_test.go b/api/internal/handler/evaluation_test.go index 815bf2c7..3ea29872 100644 --- a/api/internal/handler/evaluation_test.go +++ b/api/internal/handler/evaluation_test.go @@ -28,15 +28,15 @@ var evaluationSchema = fmt.Sprintf(`{ "alert_config_id": { "type": ["string", "null"] }, "submittal_id": { "type": ["string", "null"] }, "alert_config_name": { "type": ["string", "null"] }, - "start_date": { "type": "string", "format": "date-time" }, - "end_date": { "type": "string", "format": "date-time" }, + "started_at": { "type": "string", "format": "date-time" }, + "ended_at": { "type": "string", "format": "date-time" }, "instruments": { "type": "array", "items": %s }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, "additionalProperties": false }`, evaluationInstrumentSchema) @@ -57,8 +57,8 @@ const createEvaluationBody = `{ "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", "name": "New Test Evaluation", "body": "New Test Evaluation Description", - "start_date": "2023-05-16T13:19:41.441328Z", - "end_date": "2023-06-16T13:19:41.441328Z", + "started_at": 
"2023-05-16T13:19:41.441328Z", + "ended_at": "2023-06-16T13:19:41.441328Z", "submittal_id": "f8189297-f1a6-489d-9ea7-f1a0ffc30153", "instruments": [ {"instrument_id": "a7540f69-c41e-43b3-b655-6e44097edb7e"} @@ -70,8 +70,8 @@ const updateEvaluationBody = `{ "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", "name": "Updated Test Evaluation", "body": "Updated Test Evaluation Description", - "start_date": "2023-07-16T13:19:41.441328Z", - "end_date": "2023-08-16T13:19:41.441328Z", + "started_at": "2023-07-16T13:19:41.441328Z", + "ended_at": "2023-08-16T13:19:41.441328Z", "instruments": [] }` diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index 456ded3a..e4b31da2 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -7,7 +7,6 @@ import ( "github.com/USACE/instrumentation-api/api/internal/cloud" "github.com/USACE/instrumentation-api/api/internal/config" "github.com/USACE/instrumentation-api/api/internal/middleware" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/USACE/instrumentation-api/api/internal/service" ) @@ -21,132 +20,47 @@ func newHttpClient() *http.Client { } type ApiHandler struct { - Middleware middleware.Middleware - BlobService cloud.Blob - AlertService service.AlertService - AlertConfigService service.AlertConfigService - AlertSubscriptionService service.AlertSubscriptionService - EmailAutocompleteService service.EmailAutocompleteService - AwareParameterService service.AwareParameterService - CollectionGroupService service.CollectionGroupService - DataloggerService service.DataloggerService - DistrictRollupService service.DistrictRollupService - DomainService service.DomainService - EquivalencyTableService service.EquivalencyTableService - EvaluationService service.EvaluationService - HeartbeatService service.HeartbeatService - HomeService service.HomeService - InstrumentService service.InstrumentService - InstrumentAssignService service.InstrumentAssignService - 
InstrumentConstantService service.InstrumentConstantService - InstrumentGroupService service.InstrumentGroupService - InstrumentNoteService service.InstrumentNoteService - InstrumentStatusService service.InstrumentStatusService - IpiInstrumentService service.IpiInstrumentService - MeasurementService service.MeasurementService - InclinometerMeasurementService service.InclinometerMeasurementService - OpendcsService service.OpendcsService - PlotConfigService service.PlotConfigService - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - ProjectService service.ProjectService - ReportConfigService service.ReportConfigService - SaaInstrumentService service.SaaInstrumentService - SubmittalService service.SubmittalService - TimeseriesService service.TimeseriesService - TimeseriesCwmsService service.TimeseriesCwmsService - CalculatedTimeseriesService service.CalculatedTimeseriesService - ProcessTimeseriesService service.ProcessTimeseriesService - UnitService service.UnitService + DBService *service.DBService + Middleware middleware.Middleware + BlobService cloud.Blob + PubsubService cloud.Pubsub + Config *config.ApiConfig } func NewApi(cfg *config.ApiConfig) *ApiHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - ps := cloud.NewSQSPubsub(&cfg.AWSSQSConfig) - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) + ds := service.NewDBService(cfg.DBConfig) return &ApiHandler{ - Middleware: mw, - BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), - AlertService: service.NewAlertService(db, q), - AlertConfigService: service.NewAlertConfigService(db, q), - AlertSubscriptionService: service.NewAlertSubscriptionService(db, q), - 
EmailAutocompleteService: service.NewEmailAutocompleteService(db, q), - AwareParameterService: service.NewAwareParameterService(db, q), - CollectionGroupService: service.NewCollectionGroupService(db, q), - DataloggerService: service.NewDataloggerService(db, q), - DistrictRollupService: service.NewDistrictRollupService(db, q), - DomainService: service.NewDomainService(db, q), - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - EvaluationService: service.NewEvaluationService(db, q), - HeartbeatService: service.NewHeartbeatService(db, q), - HomeService: service.NewHomeService(db, q), - InstrumentService: service.NewInstrumentService(db, q), - InstrumentAssignService: service.NewInstrumentAssignService(db, q), - InstrumentConstantService: service.NewInstrumentConstantService(db, q), - InstrumentGroupService: service.NewInstrumentGroupService(db, q), - InstrumentNoteService: service.NewInstrumentNoteService(db, q), - InstrumentStatusService: service.NewInstrumentStatusService(db, q), - IpiInstrumentService: service.NewIpiInstrumentService(db, q), - MeasurementService: service.NewMeasurementService(db, q), - InclinometerMeasurementService: service.NewInclinometerMeasurementService(db, q), - OpendcsService: service.NewOpendcsService(db, q), - PlotConfigService: service.NewPlotConfigService(db, q), - ProfileService: profileService, - ProjectRoleService: service.NewProjectRoleService(db, q), - ProjectService: service.NewProjectService(db, q), - ReportConfigService: service.NewReportConfigService(db, q, ps, cfg.AuthJWTMocked), - SaaInstrumentService: service.NewSaaInstrumentService(db, q), - SubmittalService: service.NewSubmittalService(db, q), - TimeseriesService: service.NewTimeseriesService(db, q), - TimeseriesCwmsService: service.NewTimeseriesCwmsService(db, q), - CalculatedTimeseriesService: service.NewCalculatedTimeseriesService(db, q), - ProcessTimeseriesService: service.NewProcessTimeseriesService(db, q), - UnitService: service.NewUnitService(db, 
q), + DBService: ds, + Middleware: middleware.NewMiddleware(&cfg.ServerConfig, ds), + BlobService: cloud.NewS3Blob(&cfg.AWSS3Config, "/instrumentation", cfg.RoutePrefix), + PubsubService: cloud.NewSQSPubsub(&cfg.AWSSQSConfig), + Config: cfg, } } type TelemetryHandler struct { - Middleware middleware.Middleware - DataloggerService service.DataloggerService - DataloggerTelemetryService service.DataloggerTelemetryService - EquivalencyTableService service.EquivalencyTableService - MeasurementService service.MeasurementService + DBService *service.DBService + Middleware middleware.Middleware } func NewTelemetry(cfg *config.TelemetryConfig) *TelemetryHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - - profileService := service.NewProfileService(db, q) - projectRoleService := service.NewProjectRoleService(db, q) - dataloggerTelemetryService := service.NewDataloggerTelemetryService(db, q) - mw := middleware.NewMiddleware(&cfg.ServerConfig, profileService, projectRoleService, dataloggerTelemetryService) + ds := service.NewDBService(cfg.DBConfig) return &TelemetryHandler{ - Middleware: mw, - DataloggerService: service.NewDataloggerService(db, q), - DataloggerTelemetryService: dataloggerTelemetryService, - EquivalencyTableService: service.NewEquivalencyTableService(db, q), - MeasurementService: service.NewMeasurementService(db, q), + DBService: ds, + Middleware: middleware.NewMiddleware(&cfg.ServerConfig, ds), } } type AlertCheckHandler struct { - AlertCheckService service.AlertCheckService + DBService *service.DBService + Config *config.AlertCheckConfig } func NewAlertCheck(cfg *config.AlertCheckConfig) *AlertCheckHandler { - db := model.NewDatabase(&cfg.DBConfig) - q := db.Queries() - return &AlertCheckHandler{ - AlertCheckService: service.NewAlertCheckService(db, q, cfg), + DBService: service.NewDBService(cfg.DBConfig), } } diff --git a/api/internal/handler/handler_test.go b/api/internal/handler/handler_test.go index 4fba2264..ae8fa14e 100644 --- 
a/api/internal/handler/handler_test.go +++ b/api/internal/handler/handler_test.go @@ -19,6 +19,7 @@ import ( const ( truncateLinesBody = 30 + maxLines = 50 host = "http://localhost:8080" mockJwt = `Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6Ikw0YXFVRmd6YV9RVjhqc1ZOa281OW5GVzl6bGh1b0JGX3RxdlpkTUZkajQifQ.eyJzdWIiOiJmOGRjYWZlYS0yNDNlLTRiODktOGQ3ZC1mYTAxOTE4MTMwZjQiLCJ0eXAiOiJCZWFyZXIiLCJhbGxvd2VkLW9yaWdpbnMiOlsiaHR0cDovL2xvY2FsaG9zdDozMDAwIl0sIm5hbWUiOiJBbnRob255IExhbWJlcnQiLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJ0ZXN0IiwiZ2l2ZW5fbmFtZSI6IkFudGhvbnkiLCJmYW1pbHlfbmFtZSI6IkxhbWJlcnQiLCJlbWFpbCI6ImFudGhvbnkubS5sYW1iZXJ0QGZha2UudXNhY2UuYXJteS5taWwiLCJzdWJqZWN0RE4iOiJsYW1iZXJ0LmFudGhvbnkubS4yIiwiY2FjVUlEIjoiMiJ9.8CjeifD51ZEZZOx9eeMd7RPanvtgkQQus-R19aU91Rw` mockAppKey = "appkey" @@ -134,7 +135,14 @@ func RunAll(t *testing.T, tests []HTTPTest) { assert.Truef(t, valid, "response body did not match json schema:") if !valid { var errs string - for _, err := range result.Errors() { + for idx, err := range result.Errors() { + if idx >= maxLines { + if idx == maxLines { + errs += "\n" + errs += "..." 
+ } + continue + } errs += "\n" errs += err.String() } diff --git a/api/internal/handler/heartbeat.go b/api/internal/handler/heartbeat.go index faa6cd34..5ed4c9a8 100644 --- a/api/internal/handler/heartbeat.go +++ b/api/internal/handler/heartbeat.go @@ -1,10 +1,14 @@ package handler import ( + "errors" "net/http" + "strconv" + "time" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -13,14 +17,14 @@ import ( // @Summary checks the health of the api server // @Tags heartbeat // @Produce json -// @Success 200 {array} map[string]interface{} +// @Success 200 {object} service.Healthcheck // @Router /health [get] func (h *ApiHandler) Healthcheck(c echo.Context) error { - return c.JSON(http.StatusOK, map[string]interface{}{"status": "healthy"}) + return c.JSON(http.StatusOK, service.Healthcheck{Status: "healthy"}) } func (h *TelemetryHandler) Healthcheck(c echo.Context) error { - return c.JSON(http.StatusOK, map[string]interface{}{"status": "healthy"}) + return c.JSON(http.StatusOK, service.Healthcheck{Status: "healthy"}) } // DoHeartbeat godoc @@ -29,10 +33,10 @@ func (h *TelemetryHandler) Healthcheck(c echo.Context) error { // @Tags heartbeat // @Produce json // @Param key query string true "api key" -// @Success 200 {object} model.Heartbeat +// @Success 201 {object} service.Heartbeat // @Router /heartbeat [post] func (h *ApiHandler) DoHeartbeat(c echo.Context) error { - hb, err := h.HeartbeatService.DoHeartbeat(c.Request().Context()) + hb, err := h.DBService.HeartbeatCreate(c.Request().Context(), time.Now()) if err != nil { return httperr.InternalServerError(err) } @@ -44,10 +48,10 @@ func (h *ApiHandler) DoHeartbeat(c echo.Context) error { // @Summary gets the latest heartbeat // @Tags heartbeat // @Produce json -// @Success 200 {object} 
model.Heartbeat +// @Success 200 {object} service.Heartbeat // @Router /heartbeat/latest [get] func (h *ApiHandler) GetLatestHeartbeat(c echo.Context) error { - hb, err := h.HeartbeatService.GetLatestHeartbeat(c.Request().Context()) + hb, err := h.DBService.HeartbeatGetLatest(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -59,10 +63,19 @@ func (h *ApiHandler) GetLatestHeartbeat(c echo.Context) error { // @Summary returns all heartbeats // @Tags heartbeat // @Produce json -// @Success 200 {array} model.Heartbeat +// @Success 200 {array} service.Heartbeat // @Router /heartbeats [get] func (h *ApiHandler) ListHeartbeats(c echo.Context) error { - hh, err := h.HeartbeatService.ListHeartbeats(c.Request().Context()) + var limit int32 = 50 + limitParam := c.QueryParam("limit") + if limitParam != "" { + limit64, err := strconv.ParseInt(limitParam, 10, 32) + if err != nil { + return httperr.BadRequest(errors.New("invalid value for parameter \"limit\"")) + } + limit = int32(limit64) + } + hh, err := h.DBService.HeartbeatList(c.Request().Context(), limit) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/home.go b/api/internal/handler/home.go index 094ab632..21082d21 100644 --- a/api/internal/handler/home.go +++ b/api/internal/handler/home.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -13,11 +14,11 @@ import ( // @Summary gets information for the homepage // @Tags home // @Produce json -// @Success 200 {object} model.Home +// @Success 200 {object} db.HomeGetRow // @Failure 500 {object} echo.HTTPError // @Router /home [get] func (h *ApiHandler) GetHome(c echo.Context) error { - home, err := 
h.HomeService.GetHome(c.Request().Context()) + home, err := h.DBService.HomeGet(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument.go b/api/internal/handler/instrument.go index 47f366a1..0cc6b321 100644 --- a/api/internal/handler/instrument.go +++ b/api/internal/handler/instrument.go @@ -1,52 +1,35 @@ package handler import ( + "encoding/json" "net/http" "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/paulmach/orb/geojson" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// ListInstruments godoc -// -// @Summary lists all instruments -// @Tags instrument -// @Produce json -// @Success 200 {array} model.Instrument -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /instruments [get] -func (h *ApiHandler) ListInstruments(c echo.Context) error { - nn, err := h.InstrumentService.ListInstruments(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, nn) -} - // GetInstrumentCount godoc // // @Summary gets the total number of non deleted instruments in the system // @Tags instrument // @Produce json -// @Success 200 {object} model.InstrumentCount +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/count [get] func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { - ic, err := h.InstrumentService.GetInstrumentCount(c.Request().Context()) + ic, err := h.DBService.InstrumentGetCount(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } - return 
c.JSON(http.StatusOK, ic) + return c.JSON(http.StatusOK, map[string]interface{}{"instrument_count": ic}) } // GetInstrument godoc @@ -55,7 +38,7 @@ func (h *ApiHandler) GetInstrumentCount(c echo.Context) error { // @Tags instrument // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {object} model.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -65,7 +48,7 @@ func (h *ApiHandler) GetInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - n, err := h.InstrumentService.GetInstrument(c.Request().Context(), id) + n, err := h.DBService.InstrumentGet(c.Request().Context(), id) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -80,9 +63,9 @@ func (h *ApiHandler) GetInstrument(c echo.Context) error { // @Produce json // @Param project_id path string true "project id" Format(uuid) // @Param instrument_id path string true "instrument id" Format(uuid) -// @Param instrument body model.InstrumentCollection true "instrument collection payload" +// @Param instrument body dto.InstrumentCollection true "instrument collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.IDSlugName +// @Success 201 {array} db.InstrumentCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -96,26 +79,26 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { return httperr.MalformedID(err) } - ic := model.InstrumentCollection{} + ic := dto.InstrumentCollection{} if err := c.Bind(&ic); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() instrumentNames := make([]string, len(ic)) for idx := range ic { instrumentNames[idx] = ic[idx].Name - var prj model.IDSlugName 
+ var prj dto.IDSlugName prj.ID = projectID - ic[idx].Projects = []model.IDSlugName{prj} - ic[idx].CreatorID = p.ID - ic[idx].CreateDate = t + ic[idx].Projects = []dto.IDSlugName{prj} + ic[idx].CreatedBy = p.ID + ic[idx].CreatedAt = t } if strings.ToLower(c.QueryParam("dry_run")) == "true" { - v, err := h.InstrumentAssignService.ValidateInstrumentNamesProjectUnique(ctx, projectID, instrumentNames) + v, err := h.DBService.ProjectInstrumentGetInstrumentNamesUniqueForProject(ctx, projectID, instrumentNames) if err != nil { return httperr.InternalServerError(err) } @@ -125,7 +108,7 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { return c.JSON(http.StatusOK, v) } - nn, err := h.InstrumentService.CreateInstruments(ctx, ic) + nn, err := h.DBService.InstrumentCreateBatch(ctx, ic) if err != nil { return httperr.InternalServerError(err) } @@ -140,9 +123,9 @@ func (h *ApiHandler) CreateInstruments(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument body model.Instrument true "instrument payload" +// @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} model.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -158,23 +141,21 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { return httperr.MalformedID(err) } - var i model.Instrument + var i dto.Instrument if err := c.Bind(&i); err != nil { return httperr.MalformedBody(err) } i.ID = iID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - i.UpdaterID, i.UpdateDate = &p.ID, &t + i.UpdatedBy, i.UpdatedAt = &p.ID, &t - // update - iUpdated, err := h.InstrumentService.UpdateInstrument(c.Request().Context(), pID, 
i) + iUpdated, err := h.DBService.InstrumentUpdate(c.Request().Context(), pID, i) if err != nil { return httperr.InternalServerError(err) } - // return updated instrument return c.JSON(http.StatusOK, iUpdated) } @@ -185,9 +166,9 @@ func (h *ApiHandler) UpdateInstrument(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument body model.Instrument true "instrument payload" +// @Param instrument body dto.Instrument true "instrument payload" // @Param key query string false "api key" -// @Success 200 {object} model.Instrument +// @Success 200 {object} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -202,14 +183,18 @@ func (h *ApiHandler) UpdateInstrumentGeometry(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var geom geojson.Geometry - if err := c.Bind(&geom); err != nil { + var j json.RawMessage + if err := c.Bind(&j); err != nil { return httperr.MalformedBody(err) } - // profile of user creating instruments - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - instrument, err := h.InstrumentService.UpdateInstrumentGeometry(c.Request().Context(), projectID, instrumentID, geom, p) + instrument, err := h.DBService.InstrumentUpdateGeometry(c.Request().Context(), db.InstrumentUpdateGeometryParams{ + ProjectID: projectID, + ID: instrumentID, + Geometry: j, + UpdatedBy: &p.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -236,12 +221,15 @@ func (h *ApiHandler) DeleteFlagInstrument(c echo.Context) error { return httperr.MalformedID(err) } - iID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentService.DeleteFlagInstrument(c.Request().Context(), pID, 
iID); err != nil { + if err := h.DBService.InstrumentDeleteFlag(c.Request().Context(), db.InstrumentDeleteFlagParams{ + ID: instID, + ProjectID: pID, + }); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_assign.go b/api/internal/handler/instrument_assign.go index 6f7c046d..b97b1706 100644 --- a/api/internal/handler/instrument_assign.go +++ b/api/internal/handler/instrument_assign.go @@ -4,8 +4,10 @@ import ( "net/http" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -19,7 +21,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 201 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -35,9 +37,9 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { return httperr.MalformedID(err) } dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - v, err := h.InstrumentAssignService.AssignInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -55,7 +57,7 @@ func (h *ApiHandler) AssignInstrumentToProject(c echo.Context) error { // 
@Param instrument_id path string true "instrument uuid" Format(uuid) // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -71,9 +73,9 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { return httperr.MalformedID(err) } dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - v, err := h.InstrumentAssignService.UnassignInstrumentsFromProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(c.Request().Context(), p.ID, pID, []uuid.UUID{iID}, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -89,10 +91,10 @@ func (h *ApiHandler) UnassignInstrumentFromProject(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param project_ids body model.InstrumentProjectAssignments true "project uuids" +// @Param project_ids body dto.InstrumentProjectAssignments true "project uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -103,10 +105,10 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { if err != nil { return 
httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - pl := model.InstrumentProjectAssignments{ProjectIDs: make([]uuid.UUID, 0)} + pl := dto.InstrumentProjectAssignments{ProjectIDs: make([]uuid.UUID, 0)} if err := c.Bind(&pl); err != nil { return httperr.MalformedBody(err) } @@ -114,13 +116,13 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { ctx := c.Request().Context() switch strings.ToLower(c.QueryParam("action")) { case "assign": - v, err := h.InstrumentAssignService.AssignProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, v) case "unassign": - v, err := h.InstrumentAssignService.UnassignProjectsFromInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentProjectsToInstrument(ctx, p.ID, iID, pl.ProjectIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } @@ -137,10 +139,10 @@ func (h *ApiHandler) UpdateInstrumentProjectAssignments(c echo.Context) error { // @Description must be Project (or Application) Admin of all existing instrument projects and project to be assigned // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param instrument_ids body model.ProjectInstrumentAssignments true "instrument uuids" +// @Param instrument_ids body dto.ProjectInstrumentAssignments true "instrument uuids" // @Param action query string true "valid values are 'assign' or 'unassign'" // @Param dry_run query string false "validate request without performing action" -// @Success 200 {object} model.InstrumentsValidation +// @Success 200 {object} service.InstrumentsValidation // @Failure 400 {object} echo.HTTPError // 
@Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -151,10 +153,10 @@ func (h *ApiHandler) UpdateProjectInstrumentAssignments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) dryRun := strings.ToLower(c.QueryParam("dry_run")) == "true" - pl := model.ProjectInstrumentAssignments{InstrumentIDs: make([]uuid.UUID, 0)} + pl := dto.ProjectInstrumentAssignments{InstrumentIDs: make([]uuid.UUID, 0)} if err := c.Bind(&pl); err != nil { return httperr.MalformedBody(err) } @@ -162,13 +164,13 @@ func (h *ApiHandler) UpdateProjectInstrumentAssignments(c echo.Context) error { ctx := c.Request().Context() switch strings.ToLower(c.QueryParam("action")) { case "assign": - v, err := h.InstrumentAssignService.AssignInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) + v, err := h.DBService.ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, v) case "unassign": - v, err := h.InstrumentAssignService.UnassignInstrumentsFromProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) + v, err := h.DBService.ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(ctx, p.ID, pID, pl.InstrumentIDs, dryRun) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_constant.go b/api/internal/handler/instrument_constant.go index 15305335..481df1ab 100644 --- a/api/internal/handler/instrument_constant.go +++ b/api/internal/handler/instrument_constant.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" 
"github.com/labstack/echo/v4" ) @@ -16,17 +17,17 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/constants [get] func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - cc, err := h.InstrumentConstantService.ListInstrumentConstants(c.Request().Context(), instrumentID) + cc, err := h.DBService.InstrumentConstantList(c.Request().Context(), instID) if err != nil { return httperr.InternalServerError(err) } @@ -40,9 +41,9 @@ func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_collection_items body model.TimeseriesCollectionItems true "timeseries collection items payload" +// @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" // @Param key query string false "api key" -// @Success 200 {array} model.Timeseries +// @Success 200 {array} db.TimeseriesCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -50,20 +51,20 @@ func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { // @Security Bearer func (h *ApiHandler) CreateInstrumentConstants(c echo.Context) error { ctx := c.Request().Context() - var tc model.TimeseriesCollectionItems + var tc dto.TimeseriesCollectionItems if err 
:= c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } for idx := range tc.Items { - if instrumentID != tc.Items[idx].InstrumentID { + if instID != tc.Items[idx].InstrumentID { return httperr.Message(http.StatusBadRequest, "all instrument ids in body must match query parameter") } } - tt, err := h.InstrumentConstantService.CreateInstrumentConstants(ctx, tc.Items) + tt, err := h.DBService.InstrumentConstantCreateBatch(ctx, tc.Items) if err != nil { return httperr.InternalServerError(err) } @@ -86,15 +87,18 @@ func (h *ApiHandler) CreateInstrumentConstants(c echo.Context) error { // @Router /projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id} [delete] // @Security Bearer func (h *ApiHandler) DeleteInstrumentConstant(c echo.Context) error { - instrumentID, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - timeseriesID, err := uuid.Parse(c.Param("timeseries_id")) + tsID, err := uuid.Parse(c.Param("timeseries_id")) if err != nil { return httperr.MalformedID(err) } - err = h.InstrumentConstantService.DeleteInstrumentConstant(c.Request().Context(), instrumentID, timeseriesID) + err = h.DBService.InstrumentConstantDelete(c.Request().Context(), db.InstrumentConstantDeleteParams{ + InstrumentID: instID, + TimeseriesID: tsID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_group.go b/api/internal/handler/instrument_group.go index 751c2b86..2cab7508 100644 --- a/api/internal/handler/instrument_group.go +++ b/api/internal/handler/instrument_group.go @@ -4,8 +4,9 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" 
"github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,13 +17,13 @@ import ( // @Summary lists all instrument groups // @Tags instrument-group // @Produce json -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {array} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instrument_groups [get] func (h *ApiHandler) ListInstrumentGroups(c echo.Context) error { - groups, err := h.InstrumentGroupService.ListInstrumentGroups(c.Request().Context()) + groups, err := h.DBService.InstrumentGroupList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -35,7 +36,7 @@ func (h *ApiHandler) ListInstrumentGroups(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} model.InstrumentGroup +// @Success 200 {object} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -45,7 +46,7 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - g, err := h.InstrumentGroupService.GetInstrumentGroup(c.Request().Context(), id) + g, err := h.DBService.InstrumentGroupGet(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -57,30 +58,29 @@ func (h *ApiHandler) GetInstrumentGroup(c echo.Context) error { // @Summary creats an instrument group from an array of instruments // @Tags instrument-group // @Produce json -// @Param instrument_group body model.InstrumentGroup true "instrument group payload" +// @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// 
@Success 201 {object} model.InstrumentGroup +// @Success 201 {array} db.InstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instrument_groups [post] // @Security Bearer func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { - - gc := model.InstrumentGroupCollection{} + var gc dto.InstrumentGroupCollection if err := c.Bind(&gc); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range gc.Items { - gc.Items[idx].CreatorID = p.ID - gc.Items[idx].CreateDate = t + gc.Items[idx].CreatedBy = p.ID + gc.Items[idx].CreatedAt = t } - gg, err := h.InstrumentGroupService.CreateInstrumentGroup(c.Request().Context(), gc.Items) + gg, err := h.DBService.InstrumentGroupCreateBatch(c.Request().Context(), gc.Items) if err != nil { return httperr.InternalServerError(err) } @@ -93,9 +93,9 @@ func (h *ApiHandler) CreateInstrumentGroup(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Param instrument_group body model.InstrumentGroup true "instrument group payload" +// @Param instrument_group body dto.InstrumentGroup true "instrument group payload" // @Param key query string false "api key" -// @Success 200 {object} model.InstrumentGroup +// @Success 200 {object} db.InstrumentGroupUpdateRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -107,18 +107,18 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { return httperr.MalformedID(err) } - g := model.InstrumentGroup{ID: gID} + var g dto.InstrumentGroup if err := c.Bind(&g); err != nil { return httperr.MalformedBody(err) } g.ID = gID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - 
g.UpdaterID, g.UpdateDate = &p.ID, &t + g.UpdatedBy, g.UpdatedAt = &p.ID, &t - gUpdated, err := h.InstrumentGroupService.UpdateInstrumentGroup(c.Request().Context(), g) + gUpdated, err := h.DBService.InstrumentGroupUpdate(c.Request().Context(), g) if err != nil { return httperr.InternalServerError(err) } @@ -132,7 +132,7 @@ func (h *ApiHandler) UpdateInstrumentGroup(c echo.Context) error { // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -143,7 +143,7 @@ func (h *ApiHandler) DeleteFlagInstrumentGroup(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentGroupService.DeleteFlagInstrumentGroup(c.Request().Context(), id); err != nil { + if err := h.DBService.InstrumentGroupDeleteFlag(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -155,7 +155,7 @@ func (h *ApiHandler) DeleteFlagInstrumentGroup(c echo.Context) error { // @Tags instrument-group // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} model.Instrument +// @Success 200 {array} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -165,7 +165,7 @@ func (h *ApiHandler) ListInstrumentGroupInstruments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - nn, err := h.InstrumentGroupService.ListInstrumentGroupInstruments(c.Request().Context(), id) + nn, err := h.DBService.InstrumentListForInstrumentGroup(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ 
-190,11 +190,14 @@ func (h *ApiHandler) CreateInstrumentGroupInstruments(c echo.Context) error { if err != nil || instrumentGroupID == uuid.Nil { return httperr.MalformedID(err) } - var i model.Instrument + var i dto.Instrument if err := c.Bind(&i); err != nil || i.ID == uuid.Nil { return httperr.MalformedBody(err) } - if err := h.InstrumentGroupService.CreateInstrumentGroupInstruments(c.Request().Context(), instrumentGroupID, i.ID); err != nil { + if err := h.DBService.InstrumentGroupInstrumentCreate(c.Request().Context(), db.InstrumentGroupInstrumentCreateParams{ + InstrumentGroupID: instrumentGroupID, + InstrumentID: i.ID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, make(map[string]interface{})) @@ -225,7 +228,10 @@ func (h *ApiHandler) DeleteInstrumentGroupInstruments(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.InstrumentGroupService.DeleteInstrumentGroupInstruments(c.Request().Context(), instrumentGroupID, instrumentID); err != nil { + if err := h.DBService.InstrumentGroupInstrumentDelete(c.Request().Context(), db.InstrumentGroupInstrumentDeleteParams{ + InstrumentGroupID: instrumentGroupID, + InstrumentID: instrumentID, + }); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/instrument_group_test.go b/api/internal/handler/instrument_group_test.go index 963bf429..43dff938 100644 --- a/api/internal/handler/instrument_group_test.go +++ b/api/internal/handler/instrument_group_test.go @@ -16,15 +16,15 @@ const instrumentGroupSchema = `{ "slug": { "type": "string" }, "name": { "type": "string" }, "description": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + 
"updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "project_id": { "type": ["string", "null"] }, "instrument_count": { "type": "number" }, "timeseries_count": { "type": "number" } }, - "required": ["id", "slug", "name", "description", "creator_id", "create_date", "updater_id", "update_date", "project_id"], + "required": ["id", "slug", "name", "description", "created_by", "created_at", "updated_by", "updated_at", "project_id"], "additionalProperties": false }` diff --git a/api/internal/handler/instrument_incl.go b/api/internal/handler/instrument_incl.go new file mode 100644 index 00000000..380b1889 --- /dev/null +++ b/api/internal/handler/instrument_incl.go @@ -0,0 +1,99 @@ +package handler + +import ( + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/USACE/instrumentation-api/api/internal/util" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +// ListInclSegmentsForInstrument godoc + +// @Summary gets all incl segments for an instrument +// @Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Success 200 {array} db.VInclSegment +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/segments [get] +func (h *ApiHandler) ListInclSegmentsForInstrument(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + ss, err := h.DBService.InclSegmentListForInstrument(c.Request().Context(), iID) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, ss) +} + +// GetInclMeasurementsForInstrument godoc + +// @Summary gets incl measurements for an instrument +// 
@Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Param after query string false "after time" Format(date-time) +// @Param before query string true "before time" Format(date-time) +// @Success 200 {array} db.VInclMeasurement +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/measurements [get] +func (h *ApiHandler) GetInclMeasurementsForInstrument(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + var tw util.TimeWindow + a, b := c.QueryParam("after"), c.QueryParam("before") + if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { + return httperr.MalformedDate(err) + } + mm, err := h.DBService.InclMeasurementListForInstrumentRange(c.Request().Context(), db.InclMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, mm) +} + +// UpdateInclSegments godoc +// +// @Summary updates multiple segments for an incl instrument +// @Tags instrument-incl +// @Produce json +// @Param instrument_id path string true "instrument uuid" Format(uuid) +// @Param instrument_segments body []dto.InclSegment true "incl instrument segments payload" +// @Param key query string false "api key" +// @Success 200 {array} dto.InclSegment +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /instruments/incl/{instrument_id}/segments [put] +// @Security Bearer +func (h *ApiHandler) UpdateInclSegments(c echo.Context) error { + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + segs := make([]dto.InclSegment, 0) + if err := 
c.Bind(&segs); err != nil { + return httperr.MalformedBody(err) + } + if err := h.DBService.InclSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, segs) +} diff --git a/api/internal/handler/instrument_ipi.go b/api/internal/handler/instrument_ipi.go index 1d312554..ea394f6d 100644 --- a/api/internal/handler/instrument_ipi.go +++ b/api/internal/handler/instrument_ipi.go @@ -4,29 +4,31 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllIpiSegmentsForInstrument godoc +// ListIpiSegmentsForInstrument godoc // // @Summary gets all ipi segments for an instrument // @Tags instrument-ipi // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.IpiSegment +// @Success 200 {array} db.VIpiSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/ipi/{instrument_id}/segments [get] -func (h *ApiHandler) GetAllIpiSegmentsForInstrument(c echo.Context) error { +func (h *ApiHandler) ListIpiSegmentsForInstrument(c echo.Context) error { iID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - ss, err := h.IpiInstrumentService.GetAllIpiSegmentsForInstrument(c.Request().Context(), iID) + ss, err := h.DBService.IpiSegmentListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +43,7 @@ func (h *ApiHandler) GetAllIpiSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true 
"instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} model.IpiMeasurements +// @Success 200 {array} db.VIpiMeasurement // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,12 +53,16 @@ func (h *ApiHandler) GetIpiMeasurementsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.IpiInstrumentService.GetIpiMeasurementsForInstrument(c.Request().Context(), iID, tw) + mm, err := h.DBService.IpiMeasurementListForInstrumentRange(c.Request().Context(), db.IpiMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -69,20 +75,24 @@ func (h *ApiHandler) GetIpiMeasurementsForInstrument(c echo.Context) error { // @Tags instrument-ipi // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_segments body []model.IpiSegment true "ipi instrument segments payload" +// @Param instrument_segments body []dto.IpiSegment true "ipi instrument segments payload" // @Param key query string false "api key" -// @Success 200 {array} model.IpiSegment +// @Success 200 {array} dto.IpiSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/ipi/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateIpiSegments(c echo.Context) error { - segs := make([]model.IpiSegment, 0) + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { 
+ return httperr.MalformedID(err) + } + segs := make([]dto.IpiSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.IpiInstrumentService.UpdateIpiSegments(c.Request().Context(), segs); err != nil { + if err := h.DBService.IpiSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_ipi_test.go b/api/internal/handler/instrument_ipi_test.go index 1093082e..49319710 100644 --- a/api/internal/handler/instrument_ipi_test.go +++ b/api/internal/handler/instrument_ipi_test.go @@ -101,7 +101,7 @@ func TestIpiInstruments(t *testing.T) { tests := []HTTPTest{ { - Name: "GetAllIpiSegmentsForInstrument", + Name: "ListIpiSegmentsForInstrument", URL: fmt.Sprintf("/instruments/ipi/%s/segments", testIpiInstrumentID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/instrument_note.go b/api/internal/handler/instrument_note.go index fe28b17a..0d096b05 100644 --- a/api/internal/handler/instrument_note.go +++ b/api/internal/handler/instrument_note.go @@ -4,38 +4,22 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// ListInstrumentNotes godoc -// -// @Summary gets all instrument notes -// @Tags instrument-note -// @Produce json -// @Success 200 {array} model.InstrumentNote -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /instruments/notes [get] -func (h *ApiHandler) ListInstrumentNotes(c echo.Context) error { - notes, err := 
h.InstrumentNoteService.ListInstrumentNotes(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, notes) -} - // ListInstrumentInstrumentNotes godoc // // @Summary gets instrument notes for a single instrument // @Tags instrument-note // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.InstrumentNote +// @Success 200 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -45,7 +29,7 @@ func (h *ApiHandler) ListInstrumentInstrumentNotes(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - notes, err := h.InstrumentNoteService.ListInstrumentInstrumentNotes(c.Request().Context(), iID) + notes, err := h.DBService.InstrumentNoteListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ -58,7 +42,7 @@ func (h *ApiHandler) ListInstrumentInstrumentNotes(c echo.Context) error { // @Tags instrument-note // @Produce json // @Param note_id path string true "note uuid" Format(uuid) -// @Success 200 {object} model.InstrumentNote +// @Success 200 {object} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -68,7 +52,7 @@ func (h *ApiHandler) GetInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - note, err := h.InstrumentNoteService.GetInstrumentNote(c.Request().Context(), nID) + note, err := h.DBService.InstrumentNoteGet(c.Request().Context(), nID) if err != nil { return httperr.InternalServerError(err) } @@ -80,28 +64,28 @@ func (h *ApiHandler) GetInstrumentNote(c echo.Context) error { // @Summary creates instrument notes // @Tags instrument-note // @Produce json -// @Param instrument_note body model.InstrumentNoteCollection true "instrument note collection 
payload" +// @Param instrument_note body dto.InstrumentNoteCollection true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.InstrumentNote +// @Success 201 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/notes [post] // @Security Bearer func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { - nc := model.InstrumentNoteCollection{} + nc := dto.InstrumentNoteCollection{} if err := c.Bind(&nc); err != nil { return httperr.MalformedBody(err) } // profile and timestamp - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range nc.Items { - nc.Items[idx].CreatorID = p.ID - nc.Items[idx].CreateDate = t + nc.Items[idx].CreatedBy = p.ID + nc.Items[idx].CreatedAt = t } - nn, err := h.InstrumentNoteService.CreateInstrumentNote(c.Request().Context(), nc.Items) + nn, err := h.DBService.InstrumentNoteCreateBatch(c.Request().Context(), nc.Items) if err != nil { return httperr.InternalServerError(err) } @@ -115,9 +99,9 @@ func (h *ApiHandler) CreateInstrumentNote(c echo.Context) error { // @Tags instrument-note // @Produce json // @Param note_id path string true "note uuid" Format(uuid) -// @Param instrument_note body model.InstrumentNote true "instrument note collection payload" +// @Param instrument_note body dto.InstrumentNote true "instrument note collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} db.InstrumentNote // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -128,17 +112,17 @@ func (h *ApiHandler) UpdateInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - n := model.InstrumentNote{ID: noteID} + var n dto.InstrumentNote if err := c.Bind(&n); 
err != nil { return httperr.MalformedBody(err) } n.ID = noteID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() - n.UpdaterID, n.UpdateDate = &p.ID, &t + n.UpdatedBy, n.UpdatedAt = &p.ID, &t - nUpdated, err := h.InstrumentNoteService.UpdateInstrumentNote(c.Request().Context(), n) + nUpdated, err := h.DBService.InstrumentNoteUpdate(c.Request().Context(), n) if err != nil { return httperr.InternalServerError(err) } @@ -164,7 +148,7 @@ func (h *ApiHandler) DeleteInstrumentNote(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentNoteService.DeleteInstrumentNote(c.Request().Context(), noteID); err != nil { + if err := h.DBService.InstrumentNoteDelete(c.Request().Context(), noteID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/instrument_note_test.go b/api/internal/handler/instrument_note_test.go index 92bfcc1b..34daaacf 100644 --- a/api/internal/handler/instrument_note_test.go +++ b/api/internal/handler/instrument_note_test.go @@ -17,12 +17,12 @@ const instrumentNoteSchema = `{ "title": { "type": "string" }, "body": { "type": "string" }, "time": { "type": "string" }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null" ] }, - "update_date": { "type": ["string", "null"], "format": "date-time" } + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null" ] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" } }, - "required": ["id", "instrument_id", "title", "body", "time", "creator_id", "create_date", "updater_id", "update_date" ], + "required": ["id", "instrument_id", "title", "body", "time", "created_by", "created_at", "updated_by", "updated_at" ], "additionalProperties": false }` @@ 
-88,13 +88,6 @@ func TestInstrumentNotes(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: objSchema, }, - { - Name: "ListInstrumentNotes", - URL: "/instruments/notes", - Method: http.MethodGet, - ExpectedStatus: http.StatusOK, - ExpectedSchema: arrSchema, - }, { Name: "ListInstrumentInstrumentNotes", URL: fmt.Sprintf("/instruments/%s/notes", testInstrumentNoteIntrumentID), diff --git a/api/internal/handler/instrument_saa.go b/api/internal/handler/instrument_saa.go index 6d7cd007..20a6ffb7 100644 --- a/api/internal/handler/instrument_saa.go +++ b/api/internal/handler/instrument_saa.go @@ -4,29 +4,31 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -// GetAllSaaSegmentsForInstrument godoc +// ListSaaSegmentsForInstrument godoc // // @Summary gets all saa segments for an instrument // @Tags instrument-saa // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.SaaSegment +// @Success 200 {array} db.VSaaSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/saa/{instrument_id}/segments [get] -func (h *ApiHandler) GetAllSaaSegmentsForInstrument(c echo.Context) error { +func (h *ApiHandler) ListSaaSegmentsForInstrument(c echo.Context) error { iID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - ss, err := h.SaaInstrumentService.GetAllSaaSegmentsForInstrument(c.Request().Context(), iID) + ss, err := h.DBService.SaaSegmentListForInstrument(c.Request().Context(), iID) if err != nil { return httperr.InternalServerError(err) } @@ 
-41,7 +43,7 @@ func (h *ApiHandler) GetAllSaaSegmentsForInstrument(c echo.Context) error { // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param after query string false "after time" Format(date-time) // @Param before query string true "before time" Format(date-time) -// @Success 200 {array} model.SaaMeasurements +// @Success 200 {array} db.VSaaMeasurement // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -51,12 +53,16 @@ func (h *ApiHandler) GetSaaMeasurementsForInstrument(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.SaaInstrumentService.GetSaaMeasurementsForInstrument(c.Request().Context(), iID, tw) + mm, err := h.DBService.SaaMeasurementListForInstrumentRange(c.Request().Context(), db.SaaMeasurementListForInstrumentRangeParams{ + InstrumentID: iID, + StartTime: tw.After, + EndTime: tw.Before, + }) if err != nil { return httperr.MalformedID(err) } @@ -69,20 +75,24 @@ func (h *ApiHandler) GetSaaMeasurementsForInstrument(c echo.Context) error { // @Tags instrument-saa // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_segments body []model.SaaSegment true "saa instrument segments payload" +// @Param instrument_segments body []dto.SaaSegment true "saa instrument segments payload" // @Param key query string false "api key" -// @Success 200 {array} model.SaaSegment +// @Success 200 {array} dto.SaaSegment // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/saa/{instrument_id}/segments [put] // @Security Bearer func (h *ApiHandler) UpdateSaaSegments(c 
echo.Context) error { - segs := make([]model.SaaSegment, 0) + iID, err := uuid.Parse(c.Param("instrument_id")) + if err != nil { + return httperr.MalformedID(err) + } + segs := make([]dto.SaaSegment, 0) if err := c.Bind(&segs); err != nil { return httperr.MalformedBody(err) } - if err := h.SaaInstrumentService.UpdateSaaSegments(c.Request().Context(), segs); err != nil { + if err := h.DBService.SaaSegmentUpdateBatch(c.Request().Context(), iID, segs); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, segs) diff --git a/api/internal/handler/instrument_saa_test.go b/api/internal/handler/instrument_saa_test.go index 80a861ce..5ef43cb8 100644 --- a/api/internal/handler/instrument_saa_test.go +++ b/api/internal/handler/instrument_saa_test.go @@ -112,7 +112,7 @@ func TestSaaInstruments(t *testing.T) { tests := []HTTPTest{ { - Name: "GetAllSaaSegmentsForInstrument", + Name: "ListSaaSegmentsForInstrument", URL: fmt.Sprintf("/instruments/saa/%s/segments", testSaaInstrumentID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/instrument_status.go b/api/internal/handler/instrument_status.go index f84c7711..397d59e2 100644 --- a/api/internal/handler/instrument_status.go +++ b/api/internal/handler/instrument_status.go @@ -3,8 +3,9 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,7 +17,7 @@ import ( // @Tags instrument-status // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.InstrumentStatus +// @Success 200 {array} db.VInstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError 
@@ -27,7 +28,7 @@ func (h *ApiHandler) ListInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - ss, err := h.InstrumentStatusService.ListInstrumentStatus(c.Request().Context(), id) + ss, err := h.DBService.InstrumentStatusListForInstrument(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +42,7 @@ func (h *ApiHandler) ListInstrumentStatus(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param status_id path string true "status uuid" Format(uuid) -// @Success 200 {array} model.AlertConfig +// @Success 200 {array} db.VInstrumentStatus // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +53,7 @@ func (h *ApiHandler) GetInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - s, err := h.InstrumentStatusService.GetInstrumentStatus(c.Request().Context(), id) + s, err := h.DBService.InstrumentStatusGet(c.Request().Context(), id) if err != nil { return httperr.ServerErrorOrNotFound(err) } @@ -65,7 +66,7 @@ func (h *ApiHandler) GetInstrumentStatus(c echo.Context) error { // @Tags instrument-status // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param instrument_status body model.InstrumentStatusCollection true "instrument status collection paylaod" +// @Param instrument_status body dto.InstrumentStatusCollection true "instrument status collection payload" // @Param key query string false "api key" // @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError @@ -79,7 +80,7 @@ func (h *ApiHandler) CreateOrUpdateInstrumentStatus(c echo.Context) error { return httperr.MalformedID(err) } - var sc model.InstrumentStatusCollection + var sc dto.InstrumentStatusCollection if err := c.Bind(&sc); err != nil { return httperr.MalformedBody(err) } @@ -91,7 +92,7 @@ func (h *ApiHandler) 
CreateOrUpdateInstrumentStatus(c echo.Context) error { sc.Items[idx].ID = id } - if err := h.InstrumentStatusService.CreateOrUpdateInstrumentStatus(c.Request().Context(), instrumentID, sc.Items); err != nil { + if err := h.DBService.InstrumentStatusCreateOrUpdateBatch(c.Request().Context(), instrumentID, sc.Items); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusCreated, make(map[string]interface{})) @@ -116,7 +117,7 @@ func (h *ApiHandler) DeleteInstrumentStatus(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.InstrumentStatusService.DeleteInstrumentStatus(c.Request().Context(), id); err != nil { + if err := h.DBService.InstrumentStatusDelete(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/instrument_status_test.go b/api/internal/handler/instrument_status_test.go index 5de21b77..07b152e8 100644 --- a/api/internal/handler/instrument_status_test.go +++ b/api/internal/handler/instrument_status_test.go @@ -14,10 +14,11 @@ const instrumentStatusSchema = `{ "properties": { "id": { "type": "string" }, "time": { "type": "string" }, + "instrument_id": { "type": "string" }, "status_id": { "type": "string" }, "status": { "type": "string" } }, - "required": ["id", "time", "status_id", "status"], + "required": ["id", "instrument_id", "time", "status_id", "status"], "additionalProperties": false }` diff --git a/api/internal/handler/instrument_test.go b/api/internal/handler/instrument_test.go index a4db3145..09fe9c97 100644 --- a/api/internal/handler/instrument_test.go +++ b/api/internal/handler/instrument_test.go @@ -51,18 +51,19 @@ var instrumentSchema = fmt.Sprintf(`{ }, "station": { "type": ["number", "null"] }, "offset": { "type": ["number", "null"] }, - "creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": 
["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "projects": %s, "nid_id": { "type": ["string", "null"] }, "usgs_id": { "type": ["string", "null"] }, "has_cwms": { "type": "boolean" }, "show_cwms_tab": { "type": "boolean" }, - "opts": { "type": ["object", "null"] } + "opts": { "type": ["object", "null"] }, + "telemetry": { "type": ["array","null"], "items": { "type": "object" }} }, - "required": ["id", "slug", "name", "type_id", "type", "status_id", "status", "status_time", "geometry", "creator_id", "create_date", "updater_id", "update_date", "projects", "station", "offset", "constants", "has_cwms", "alert_configs", "nid_id", "usgs_id", "show_cwms_tab"], + "required": ["id", "slug", "name", "type_id", "type", "status_id", "status", "status_time", "geometry", "created_by", "created_at", "updated_by", "updated_at", "projects", "station", "offset", "constants", "has_cwms", "alert_configs", "nid_id", "usgs_id", "show_cwms_tab"], "additionalProperties": false }`, IDSlugNameArrSchema) @@ -288,13 +289,6 @@ func TestInstruments(t *testing.T) { ExpectedStatus: http.StatusOK, ExpectedSchema: objSchema, }, - { - Name: "ListInstruments", - URL: "/instruments", - Method: http.MethodGet, - ExpectedStatus: http.StatusOK, - ExpectedSchema: arrSchema, - }, { Name: "ListProjectInstruments", URL: fmt.Sprintf("/projects/%s/instruments", testProjectID), diff --git a/api/internal/handler/measurement.go b/api/internal/handler/measurement.go index 0ee8ca4c..d2425e14 100644 --- a/api/internal/handler/measurement.go +++ b/api/internal/handler/measurement.go @@ -1,11 +1,16 @@ package handler import ( + "errors" + "log" "net/http" + "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + 
"github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -18,9 +23,9 @@ import ( // @Accept json // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // @Param key query string false "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,7 +33,7 @@ import ( // @Security Bearer func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) error { ctx := c.Request().Context() - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } @@ -38,17 +43,27 @@ func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) return httperr.MalformedID(err) } - dd := mcc.TimeseriesIDs() - if err := h.TimeseriesService.AssertTimeseriesLinkedToProject(ctx, pID, dd); err != nil { - return httperr.InternalServerError(err) + tt := make([]uuid.UUID, len(mcc.Items)) + for idx, mc := range mcc.Items { + tt[idx] = mc.TimeseriesID } - stored, err := h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(ctx, mcc.Items) + valid, err := h.DBService.TimeseriesGetAllBelongToProject(ctx, db.TimeseriesGetAllBelongToProjectParams{ + ProjectID: pID, + TimeseriesIds: tt, + }) if err != nil { return 
httperr.InternalServerError(err) } + if !valid { + return httperr.BadRequest(errors.New("one or more timeseries do not belong to an instrument in this project")) + } - return c.JSON(http.StatusCreated, stored) + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mcc.Items); err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusCreated, map[string]interface{}{}) } // CreateOrUpdateTimeseriesMeasurements godoc @@ -56,24 +71,23 @@ func (h *ApiHandler) CreateOrUpdateProjectTimeseriesMeasurements(c echo.Context) // @Summary creates or updates one or more timeseries measurements // @Tags measurement // @Produce json -// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // @Param key query string true "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries_measurements [post] func (h *ApiHandler) CreateOrUpdateTimeseriesMeasurements(c echo.Context) error { - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } - stored, err := h.MeasurementService.CreateOrUpdateTimeseriesMeasurements(c.Request().Context(), mcc.Items) - if err != nil { + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateBatch(c.Request().Context(), mcc.Items); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, stored) + return c.JSON(http.StatusCreated, map[string]interface{}{}) } // UpdateTimeseriesMeasurements godoc @@ -84,29 +98,32 @@ func (h 
*ApiHandler) CreateOrUpdateTimeseriesMeasurements(c echo.Context) error // @Param project_id path string true "project uuid" Format(uuid) // @Param after query string false "after timestamp" Format(date-time) // @Param before query string false "before timestamp" Format(date-time) -// @Param timeseries_measurement_collections body model.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection true "array of timeseries measurement collections" // @Param key query string false "api key" -// @Success 200 {array} model.MeasurementCollection +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/timeseries_measurements [put] // @Security Bearer func (h *ApiHandler) UpdateTimeseriesMeasurements(c echo.Context) error { - var tw model.TimeWindow + var tw *util.TimeWindow + var twParam util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") - if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) + if a != "" && b != "" { + if err := twParam.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { + return httperr.MalformedDate(err) + } + tw = &twParam } - var mcc model.TimeseriesMeasurementCollectionCollection + var mcc dto.TimeseriesMeasurementCollectionCollection if err := c.Bind(&mcc); err != nil { return httperr.MalformedBody(err) } - stored, err := h.MeasurementService.UpdateTimeseriesMeasurements(c.Request().Context(), mcc.Items, tw) - if err != nil { + if err := h.DBService.TimeseriesMeasurementUpdateBatch(c.Request().Context(), mcc.Items, tw); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, stored) + return c.JSON(http.StatusOK, map[string]interface{}{}) } // 
DeleteTimeserieMeasurements godoc @@ -134,8 +151,62 @@ func (h *ApiHandler) DeleteTimeserieMeasurements(c echo.Context) error { if err != nil { return httperr.MalformedDate(err) } - if err := h.MeasurementService.DeleteTimeserieMeasurements(c.Request().Context(), id, t); err != nil { + if err := h.DBService.TimeseriesMeasurementDelete(c.Request().Context(), db.TimeseriesMeasurementDeleteParams{ + TimeseriesID: id, + Time: t, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) } + +// CreateOrUpdateTimeseriesMeasurements godoc +// +// @Summary creates one or more timeseries measurements +// @Tags measurement +// @Accept json,mpfd +// @Produce json +// @Param timeseries_measurement_collections body dto.TimeseriesMeasurementCollectionCollection false "json array of timeseries measurement collections" +// @Param timeseries_measurement_collections formData file false "TOA5 file of timeseries measurement collections" +// @Success 200 {array} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /timeseries_measurements [post] +// @Security Bearer +func (h *ApiHandler) _CreateOrUpdateTimeseriesMeasurements(c echo.Context) error { + contentType := "application/json" + contentTypeHeader, ok := c.Request().Header["Content-Type"] + if ok && len(contentTypeHeader) > 0 { + contentType = strings.ToLower(contentTypeHeader[0]) + } + + if strings.Contains(contentType, "multipart/form-data") { + return h.createOrUpdateTimeseriesMeasurementsMultipartFormData(c) + } + + return h.CreateOrUpdateTimeseriesMeasurements(c) +} + +func (h *ApiHandler) createOrUpdateTimeseriesMeasurementsMultipartFormData(c echo.Context) error { + file, err := c.FormFile("file") + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + + src, err := file.Open() + if err != nil { + return 
echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + defer func() { + if err := src.Close(); err != nil { + log.Printf("error closing file: %s", err.Error()) + } + }() + + if err := h.DBService.TimeseriesMeasurementCreateOrUpdateDataloggerTOA5Upload(c.Request().Context(), src); err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusCreated, map[string]interface{}{}) +} diff --git a/api/internal/handler/measurement_inclinometer.go b/api/internal/handler/measurement_inclinometer.go deleted file mode 100644 index 85b813f7..00000000 --- a/api/internal/handler/measurement_inclinometer.go +++ /dev/null @@ -1,158 +0,0 @@ -package handler - -import ( - "encoding/json" - "net/http" - "time" - - "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - - "github.com/google/uuid" - "github.com/labstack/echo/v4" -) - -// ListInclinometerMeasurements godoc -// -// @Summary lists all measurements for an inclinometer -// @Tags measurement-inclinometer -// @Produce json -// @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param after query string false "after timestamp" Format(date-time) -// @Param before query string false "before timestamp" Format(date-time) -// @Success 200 {object} model.InclinometerMeasurementCollection -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /timeseries/{timeseries_id}/inclinometer_measurements [get] -func (h *ApiHandler) ListInclinometerMeasurements(c echo.Context) error { - - tsID, err := uuid.Parse(c.Param("timeseries_id")) - if err != nil { - return httperr.MalformedID(err) - } - - var tw model.TimeWindow - a, b := c.QueryParam("after"), c.QueryParam("before") - if err = tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) - } - - ctx := 
c.Request().Context() - - im, err := h.InclinometerMeasurementService.ListInclinometerMeasurements(ctx, tsID, tw) - if err != nil { - return httperr.InternalServerError(err) - } - - cm, err := h.MeasurementService.GetTimeseriesConstantMeasurement(ctx, tsID, "inclinometer-constant") - if err != nil { - return httperr.InternalServerError(err) - } - - for idx := range im.Inclinometers { - values, err := h.InclinometerMeasurementService.ListInclinometerMeasurementValues(ctx, tsID, im.Inclinometers[idx].Time, float64(cm.Value)) - if err != nil { - return httperr.InternalServerError(err) - } - - jsonValues, err := json.Marshal(values) - if err != nil { - return httperr.InternalServerError(err) - } - im.Inclinometers[idx].Values = jsonValues - } - - return c.JSON(http.StatusOK, im) -} - -// CreateOrUpdateProjectInclinometerMeasurements godoc -// -// @Summary creates or updates one or more inclinometer measurements -// @Tags measurement-inclinometer -// @Produce json -// @Param project_id path string true "project uuid" Format(uuid) -// @Param timeseries_measurement_collections body model.InclinometerMeasurementCollectionCollection true "inclinometer measurement collections" -// @Param key query string false "api key" -// @Success 200 {array} model.InclinometerMeasurementCollection -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /projects/{project_id}/inclinometer_measurements [post] -// @Security Bearer -func (h *ApiHandler) CreateOrUpdateProjectInclinometerMeasurements(c echo.Context) error { - var mcc model.InclinometerMeasurementCollectionCollection - if err := c.Bind(&mcc); err != nil { - return httperr.MalformedBody(err) - } - - pID, err := uuid.Parse(c.Param("project_id")) - if err != nil { - return httperr.MalformedID(err) - } - - ctx := c.Request().Context() - - dd := mcc.TimeseriesIDs() - if err := h.TimeseriesService.AssertTimeseriesLinkedToProject(ctx, pID, dd); err != nil { 
- return httperr.InternalServerError(err) - } - - p := c.Get("profile").(model.Profile) - - stored, err := h.InclinometerMeasurementService.CreateOrUpdateInclinometerMeasurements(ctx, mcc.Items, p, time.Now()) - if err != nil { - return httperr.InternalServerError(err) - } - - //create inclinometer constant if doesn't exist - if len(mcc.Items) > 0 { - cm, err := h.MeasurementService.GetTimeseriesConstantMeasurement(ctx, mcc.Items[0].TimeseriesID, "inclinometer-constant") - if err != nil { - return httperr.InternalServerError(err) - } - - if cm.TimeseriesID == uuid.Nil { - err := h.InclinometerMeasurementService.CreateTimeseriesConstant(ctx, mcc.Items[0].TimeseriesID, "inclinometer-constant", "Meters", 20000) - if err != nil { - return httperr.InternalServerError(err) - } - } - - } - return c.JSON(http.StatusCreated, stored) -} - -// DeleteInclinometerMeasurements godoc -// -// @Summary deletes a single inclinometer measurement by timestamp -// @Tags measurement-inclinometer -// @Produce json -// @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param time query string true "timestamp of measurement to delete" Format(date-time) -// @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /timeseries/{timeseries_id}/inclinometer_measurements [delete] -// @Security Bearer -func (h *ApiHandler) DeleteInclinometerMeasurements(c echo.Context) error { - // id from url params - id, err := uuid.Parse(c.Param("timeseries_id")) - if err != nil { - return httperr.MalformedID(err) - } - - timeString := c.QueryParam("time") - - t, err := time.Parse(time.RFC3339, timeString) - if err != nil { - return httperr.MalformedDate(err) - } - - if err := h.InclinometerMeasurementService.DeleteInclinometerMeasurement(c.Request().Context(), id, t); err != nil { - return httperr.InternalServerError(err) 
- } - return c.JSON(http.StatusOK, make(map[string]interface{})) -} diff --git a/api/internal/handler/media.go b/api/internal/handler/media.go index 81f4c6d3..7024f416 100644 --- a/api/internal/handler/media.go +++ b/api/internal/handler/media.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -15,7 +14,7 @@ import ( // @Produce jpeg // @Param project_slug path string true "project abbr" // @Param uri_path path string true "uri path of requested resource" -// @Success 200 +// @Success 200 {file} file // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError diff --git a/api/internal/handler/opendcs.go b/api/internal/handler/opendcs.go deleted file mode 100644 index f2067af2..00000000 --- a/api/internal/handler/opendcs.go +++ /dev/null @@ -1,28 +0,0 @@ -package handler - -import ( - "net/http" - - "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" - - "github.com/labstack/echo/v4" -) - -// ListOpendcsSites godoc -// -// @Summary lists all instruments, represented as opendcs sites -// @Tags opendcs -// @Produce xml -// @Success 200 {array} model.Site -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /opendcs/sites [get] -func (h *ApiHandler) ListOpendcsSites(c echo.Context) error { - ss, err := h.OpendcsService.ListOpendcsSites(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.XMLPretty(http.StatusOK, ss, " ") -} diff --git a/api/internal/handler/plot_config.go b/api/internal/handler/plot_config.go index a35dc029..2fcc990a 100644 --- a/api/internal/handler/plot_config.go +++ b/api/internal/handler/plot_config.go @@ -3,8 +3,8 @@ package handler import ( "net/http" 
+ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -16,7 +16,7 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.PlotConfig +// @Success 200 {array} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -27,7 +27,7 @@ func (h *ApiHandler) ListPlotConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - cc, err := h.PlotConfigService.ListPlotConfigs(c.Request().Context(), pID) + cc, err := h.DBService.PlotConfigListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -41,7 +41,7 @@ func (h *ApiHandler) ListPlotConfigs(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +52,7 @@ func (h *ApiHandler) GetPlotConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - g, err := h.PlotConfigService.GetPlotConfig(c.Request().Context(), cID) + g, err := h.DBService.PlotConfigGet(c.Request().Context(), cID) if err != nil { return httperr.InternalServerError(err) } @@ -83,7 +83,10 @@ func (h *ApiHandler) DeletePlotConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.PlotConfigService.DeletePlotConfig(c.Request().Context(), pID, cID); err != nil { + if err := h.DBService.PlotConfigDelete(c.Request().Context(), 
db.PlotConfigDeleteParams{ + ID: cID, + ProjectID: pID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/plot_config_bullseye.go b/api/internal/handler/plot_config_bullseye.go index 122e5146..80059118 100644 --- a/api/internal/handler/plot_config_bullseye.go +++ b/api/internal/handler/plot_config_bullseye.go @@ -4,8 +4,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +18,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigBullseyePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/bullseye_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { - var pc model.PlotConfigBullseyePlot + var pc dto.PlotConfigBullseyePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -42,10 +44,10 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() - pcNew, err := 
h.PlotConfigService.CreatePlotConfigBullseyePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateBullseye(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +61,16 @@ func (h *ApiHandler) CreatePlotConfigBullseyePlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigBullseyePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigBullseyePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/bullseye_plots/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { - var pc model.PlotConfigBullseyePlot + var pc dto.PlotConfigBullseyePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +93,11 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigBullseyePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateBullseye(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -110,7 +112,7 @@ func (h *ApiHandler) UpdatePlotConfigBullseyePlot(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) 
// @Param key query string false "api key" -// @Success 200 {object} []model.PlotConfigMeasurementBullseyePlot +// @Success 200 {array} db.PlotConfigMeasurementListBullseyeRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -125,12 +127,16 @@ func (h *ApiHandler) ListPlotConfigMeasurementsBullseyePlot(c echo.Context) erro if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - mm, err := h.PlotConfigService.ListPlotConfigMeasurementsBullseyePlot(c.Request().Context(), pcID, tw) + mm, err := h.DBService.PlotConfigMeasurementListBullseye(c.Request().Context(), db.PlotConfigMeasurementListBullseyeParams{ + PlotConfigID: pcID, + After: tw.After, + Before: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_contour.go b/api/internal/handler/plot_config_contour.go index 81fb430c..476583c2 100644 --- a/api/internal/handler/plot_config_contour.go +++ b/api/internal/handler/plot_config_contour.go @@ -4,8 +4,11 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +19,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigContourPlot true "plot config payload" +// @Param plot_config body dto.PlotConfigContourPlot true 
"plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/contour_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { - var pc model.PlotConfigContourPlot + var pc dto.PlotConfigContourPlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -42,10 +45,10 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigContourPlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateContour(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +62,16 @@ func (h *ApiHandler) CreatePlotConfigContourPlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigContourPlot true "plot config payload" +// @Param plot_config body dto.PlotConfigContourPlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/contour_plots/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigContourPlot(c echo.Context) error { - var pc model.PlotConfigContourPlot + var 
pc dto.PlotConfigContourPlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +94,11 @@ func (h *ApiHandler) UpdatePlotConfigContourPlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigContourPlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateContour(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -127,12 +130,16 @@ func (h *ApiHandler) ListPlotConfigTimesContourPlot(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) } - tt, err := h.PlotConfigService.ListPlotConfigTimesContourPlot(c.Request().Context(), pcID, tw) + tt, err := h.DBService.PlotContourConfigListTimeRange(c.Request().Context(), db.PlotContourConfigListTimeRangeParams{ + PlotContourConfigID: pcID, + After: tw.After, + Before: tw.Before, + }) if err != nil { return httperr.InternalServerError(err) } @@ -148,7 +155,7 @@ func (h *ApiHandler) ListPlotConfigTimesContourPlot(c echo.Context) error { // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) // @Param time query string true "time" // @Param key query string false "api key" -// @Success 200 {object} model.AggregatePlotConfigMeasurementsContourPlot +// @Success 200 {object} service.AggregatePlotConfigMeasurementsContourPlot // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -168,7 +175,7 @@ func (h *ApiHandler) GetPlotConfigMeasurementsContourPlot(c echo.Context) error if err != nil 
{ return httperr.MalformedDate(err) } - mm, err := h.PlotConfigService.GetPlotConfigMeasurementsContourPlot(c.Request().Context(), pcID, t) + mm, err := h.DBService.PlotConfigMeasurementListContour(c.Request().Context(), pcID, t) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_profile.go b/api/internal/handler/plot_config_profile.go index 86e1b913..8f98c217 100644 --- a/api/internal/handler/plot_config_profile.go +++ b/api/internal/handler/plot_config_profile.go @@ -4,8 +4,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,16 +18,16 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigProfilePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/profile_plots [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { - var pc model.PlotConfigProfilePlot + var pc dto.PlotConfigProfilePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -42,10 +44,10 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := 
c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigProfilePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateProfile(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -59,16 +61,16 @@ func (h *ApiHandler) CreatePlotConfigProfilePlot(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigProfilePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigProfilePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/plot_configs/profile_plots/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigProfilePlot(c echo.Context) error { - var pc model.PlotConfigProfilePlot + var pc dto.PlotConfigProfilePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -91,11 +93,11 @@ func (h *ApiHandler) UpdatePlotConfigProfilePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigProfilePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateProfile(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_scatter_line.go b/api/internal/handler/plot_config_scatter_line.go index 0bacd329..b494a1fd 100644 --- 
a/api/internal/handler/plot_config_scatter_line.go +++ b/api/internal/handler/plot_config_scatter_line.go @@ -4,8 +4,10 @@ import ( "net/http" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -16,9 +18,9 @@ import ( // @Tags plot-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param plot_config body model.PlotConfigScatterLinePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 201 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -26,7 +28,7 @@ import ( // @Router /projects/{project_id}/plot_configurations [post] // @Security Bearer func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { - var pc model.PlotConfigScatterLinePlot + var pc dto.PlotConfigScatterLinePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -43,10 +45,10 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ProjectID = pID - p := c.Get("profile").(model.Profile) - pc.CreatorID, pc.CreateDate = p.ID, time.Now() + p := c.Get("profile").(db.VProfile) + pc.CreatedBy, pc.CreatedAt = p.ID, time.Now() - pcNew, err := h.PlotConfigService.CreatePlotConfigScatterLinePlot(c.Request().Context(), pc) + pcNew, err := h.DBService.PlotConfigCreateScatterLine(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -60,9 +62,9 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c 
echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param plot_configuration_id path string true "plot config uuid" Format(uuid) -// @Param plot_config body model.PlotConfigScatterLinePlot true "plot config payload" +// @Param plot_config body dto.PlotConfigScatterLinePlot true "plot config payload" // @Param key query string false "api key" -// @Success 200 {object} model.PlotConfig +// @Success 200 {object} db.VPlotConfiguration // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -70,7 +72,7 @@ func (h *ApiHandler) CreatePlotConfigScatterLinePlot(c echo.Context) error { // @Router /projects/{project_id}/plot_configurations/{plot_configuration_id} [put] // @Security Bearer func (h *ApiHandler) UpdatePlotConfigScatterLinePlot(c echo.Context) error { - var pc model.PlotConfigScatterLinePlot + var pc dto.PlotConfigScatterLinePlot if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } @@ -93,11 +95,11 @@ func (h *ApiHandler) UpdatePlotConfigScatterLinePlot(c echo.Context) error { } pc.ID = pcID - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) tNow := time.Now() - pc.UpdaterID, pc.UpdateDate = &p.ID, &tNow + pc.UpdatedBy, pc.UpdatedAt = &p.ID, &tNow - pcUpdated, err := h.PlotConfigService.UpdatePlotConfigScatterLinePlot(c.Request().Context(), pc) + pcUpdated, err := h.DBService.PlotConfigUpdateScatterLine(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/plot_config_scatter_line_test.go b/api/internal/handler/plot_config_scatter_line_test.go index be18274a..b4012f2b 100644 --- a/api/internal/handler/plot_config_scatter_line_test.go +++ b/api/internal/handler/plot_config_scatter_line_test.go @@ -15,10 +15,10 @@ const plotConfigBaseSchema = `{ "id": { "type": "string" }, "slug": { "type": "string" }, "name": { "type": "string" }, - 
"creator_id": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "project_id": { "type": ["string", "null"] }, "show_masked": { "type": "boolean" }, "show_nonvalidated": { "type": "boolean" }, @@ -31,7 +31,7 @@ const plotConfigBaseSchema = `{ "display": %s }, "required": [ - "id", "slug", "name", "creator_id", "create_date", "updater_id", "update_date", "project_id", + "id", "slug", "name", "created_by", "created_at", "updated_by", "updated_at", "project_id", "show_masked", "show_nonvalidated", "show_comments", "auto_range", "date_range", "threshold", "report_configs", "plot_type", "display" ], "additionalProperties": false diff --git a/api/internal/handler/profile.go b/api/internal/handler/profile.go index 4916cc33..111eed40 100644 --- a/api/internal/handler/profile.go +++ b/api/internal/handler/profile.go @@ -5,8 +5,10 @@ import ( "errors" "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/labstack/echo/v4" ) @@ -15,14 +17,14 @@ import ( // @Summary creates a user profile // @Tags profile // @Produce json -// @Success 200 {object} model.Profile +// @Success 200 {object} db.ProfileCreateRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /profiles [post] // @Security ClaimsOnly func (h *ApiHandler) CreateProfile(c echo.Context) error { - claims := 
c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) if !claims.X509Presented { return httperr.Forbidden(errors.New("invalid value for claim x509_presented")) @@ -31,14 +33,12 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { return httperr.Forbidden(errors.New("unable to create profile; cacUID claim is nil")) } - p := model.ProfileInfo{ + pNew, err := h.DBService.ProfileCreate(c.Request().Context(), db.ProfileCreateParams{ Username: claims.PreferredUsername, DisplayName: claims.Name, Email: claims.Email, - EDIPI: *claims.CacUID, - } - - pNew, err := h.ProfileService.CreateProfile(c.Request().Context(), p) + Edipi: int64(*claims.CacUID), + }) if err != nil { return httperr.InternalServerError(err) } @@ -50,7 +50,7 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { // @Summary gets profile for current authenticated user // @Tags profile // @Produce json -// @Success 200 {object} model.Profile +// @Success 200 {object} db.VProfile // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -58,9 +58,9 @@ func (h *ApiHandler) CreateProfile(c echo.Context) error { // @Security ClaimsOnly func (h *ApiHandler) GetMyProfile(c echo.Context) error { ctx := c.Request().Context() - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { if errors.Is(err, sql.ErrNoRows) { return h.CreateProfile(c) @@ -68,7 +68,7 @@ func (h *ApiHandler) GetMyProfile(c echo.Context) error { return httperr.InternalServerError(err) } - pValidated, err := h.ProfileService.UpdateProfileForClaims(ctx, p, claims) + pValidated, err := h.DBService.ProfileUpdateForClaims(ctx, p, claims) if err != nil { return httperr.InternalServerError(err) } @@ -81,21 +81,21 @@ func (h *ApiHandler) 
GetMyProfile(c echo.Context) error { // @Summary creates token for a profile // @Tags profile // @Produce json -// @Success 200 {object} model.Token +// @Success 200 {object} service.Token // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /my_tokens [post] // @Security ClaimsOnly func (h *ApiHandler) CreateToken(c echo.Context) error { - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) ctx := c.Request().Context() - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.InternalServerError(err) } - token, err := h.ProfileService.CreateProfileToken(ctx, p.ID) + token, err := h.DBService.ProfileTokenCreate(ctx, p.ID) if err != nil { return httperr.InternalServerError(err) } @@ -115,7 +115,7 @@ func (h *ApiHandler) CreateToken(c echo.Context) error { // @Router /my_tokens/{token_id} [delete] // @Security ClaimsOnly func (h *ApiHandler) DeleteToken(c echo.Context) error { - claims := c.Get("claims").(model.ProfileClaims) + claims := c.Get("claims").(dto.ProfileClaims) ctx := c.Request().Context() tokenID := c.Param("token_id") @@ -123,11 +123,14 @@ func (h *ApiHandler) DeleteToken(c echo.Context) error { return httperr.Message(http.StatusBadRequest, "bad token id") } - p, err := h.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := h.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.InternalServerError(err) } - if err := h.ProfileService.DeleteToken(ctx, p.ID, tokenID); err != nil { + if err := h.DBService.ProfileTokenDelete(ctx, db.ProfileTokenDeleteParams{ + ProfileID: p.ID, + TokenID: tokenID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/project.go 
b/api/internal/handler/project.go index c02a3cf1..7cea35c4 100644 --- a/api/internal/handler/project.go +++ b/api/internal/handler/project.go @@ -5,8 +5,10 @@ import ( "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + _ "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,13 +19,13 @@ import ( // @Summary lists all districts // @Tags project // @Produce json -// @Success 200 {array} model.District +// @Success 200 {array} db.VDistrict // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /districts [get] func (h *ApiHandler) ListDistricts(c echo.Context) error { - dd, err := h.ProjectService.ListDistricts(c.Request().Context()) + dd, err := h.DBService.DistrictList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -36,7 +38,7 @@ func (h *ApiHandler) ListDistricts(c echo.Context) error { // @Tags project // @Produce json // @Param federal_id query string false "federal id" -// @Success 200 {array} model.Project +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -46,14 +48,14 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { fedID := c.QueryParam("federal_id") if fedID != "" { - projects, err := h.ProjectService.ListProjectsByFederalID(ctx, fedID) + projects, err := h.DBService.ProjectListForFederalID(ctx, &fedID) if err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, projects) } - projects, err := h.ProjectService.ListProjects(ctx) + projects, err := h.DBService.ProjectList(ctx) if err != nil { return httperr.InternalServerError(err) } @@ 
-66,7 +68,7 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { // @Tags project // @Produce json // @Param role query string false "role" -// @Success 200 {array} model.Project +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -75,10 +77,10 @@ func (h *ApiHandler) ListProjects(c echo.Context) error { func (h *ApiHandler) ListMyProjects(c echo.Context) error { ctx := c.Request().Context() - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) if p.IsAdmin { - projects, err := h.ProjectService.ListProjects(ctx) + projects, err := h.DBService.ProjectList(ctx) if err != nil { return httperr.InternalServerError(err) } @@ -89,7 +91,10 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { if role != "" { role = strings.ToLower(role) if role == "admin" || role == "member" { - projects, err := h.ProjectService.ListProjectsForProfileRole(ctx, p.ID, role) + projects, err := h.DBService.ProjectListForProfileRole(ctx, db.ProjectListForProfileRoleParams{ + ProfileID: p.ID, + Name: role, + }) if err != nil { return httperr.InternalServerError(err) } @@ -98,7 +103,7 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { return httperr.Message(http.StatusBadRequest, "role parameter must be 'admin' or 'member'") } - projects, err := h.ProjectService.ListProjectsForProfile(ctx, p.ID) + projects, err := h.DBService.ProjectListForProfileAdmin(ctx, p.ID) if err != nil { return httperr.InternalServerError(err) } @@ -111,7 +116,7 @@ func (h *ApiHandler) ListMyProjects(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Project +// @Success 200 {array} db.VInstrument // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -121,7 +126,7 @@ func (h *ApiHandler) 
ListProjectInstruments(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - nn, err := h.ProjectService.ListProjectInstruments(c.Request().Context(), id) + nn, err := h.DBService.InstrumentListForProject(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -134,7 +139,7 @@ func (h *ApiHandler) ListProjectInstruments(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.InstrumentGroup +// @Success 200 {array} db.VInstrumentGroup // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -144,7 +149,7 @@ func (h *ApiHandler) ListProjectInstrumentGroups(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - gg, err := h.ProjectService.ListProjectInstrumentGroups(c.Request().Context(), id) + gg, err := h.DBService.InstrumentGroupListForProject(c.Request().Context(), &id) if err != nil { return httperr.InternalServerError(err) } @@ -156,13 +161,13 @@ func (h *ApiHandler) ListProjectInstrumentGroups(c echo.Context) error { // @Summary gets the total number of non-deleted projects in the system // @Tags project // @Produce json -// @Success 200 {object} model.ProjectCount +// @Success 200 {object} service.ProjectCount // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/count [get] func (h *ApiHandler) GetProjectCount(c echo.Context) error { - pc, err := h.ProjectService.GetProjectCount(c.Request().Context()) + pc, err := h.DBService.ProjectGetCount(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } @@ -175,7 +180,7 @@ func (h *ApiHandler) GetProjectCount(c echo.Context) error { // @Tags project // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {object} model.Project 
+// @Success 200 {object} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -185,7 +190,7 @@ func (h *ApiHandler) GetProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - project, err := h.ProjectService.GetProject(c.Request().Context(), id) + project, err := h.DBService.ProjectGet(c.Request().Context(), id) if err != nil { return httperr.InternalServerError(err) } @@ -197,32 +202,32 @@ func (h *ApiHandler) GetProject(c echo.Context) error { // @Summary accepts an array of instruments for bulk upload to the database // @Tags project // @Produce json -// @Param project_collection body model.ProjectCollection true "project collection payload" +// @Param project_collection body dto.ProjectCollection true "project collection payload" // @Param key query string false "api key" -// @Success 200 {array} model.IDSlugName +// @Success 201 {array} db.ProjectCreateBatchRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects [post] // @Security Bearer func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { - var pc model.ProjectCollection + var pc dto.ProjectCollection if err := c.Bind(&pc); err != nil { return httperr.MalformedBody(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) t := time.Now() for idx := range pc { if pc[idx].Name == "" { return httperr.Message(http.StatusBadRequest, "project name required") } - pc[idx].CreatorID = p.ID - pc[idx].CreateDate = t + pc[idx].CreatedBy = p.ID + pc[idx].CreatedAt = t } - pp, err := h.ProjectService.CreateProjectBulk(c.Request().Context(), pc) + pp, err := h.DBService.ProjectCreateBatch(c.Request().Context(), pc) if err != nil { return httperr.InternalServerError(err) } @@ -235,9 +240,9 @@ func (h *ApiHandler) CreateProjectBulk(c echo.Context) error { // @Tags project // @Produce json // 
@Param project_id path string true "project uuid" Format(uuid) -// @Param project body model.Project true "project payload" +// @Param project body dto.Project true "project payload" // @Param key query string false "api key" -// @Success 200 {object} model.Project +// @Success 200 {object} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -248,17 +253,17 @@ func (h *ApiHandler) UpdateProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var p model.Project + var p dto.Project if err := c.Bind(&p); err != nil { return httperr.MalformedBody(err) } p.ID = id - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - p.UpdaterID, p.UpdateDate = &profile.ID, &t + p.UpdatedBy, p.UpdatedAt = &profile.ID, &t - pUpdated, err := h.ProjectService.UpdateProject(c.Request().Context(), p) + pUpdated, err := h.DBService.ProjectUpdate(c.Request().Context(), p) if err != nil { return httperr.InternalServerError(err) } @@ -283,7 +288,7 @@ func (h *ApiHandler) DeleteFlagProject(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.ProjectService.DeleteFlagProject(c.Request().Context(), id); err != nil { + if err := h.DBService.ProjectDeleteFlag(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) @@ -313,11 +318,11 @@ func (h *ApiHandler) UploadProjectImage(c echo.Context) error { if err != nil || fh == nil { return httperr.Message(http.StatusBadRequest, "attached form file 'image' required") } - if fh.Size > 2000000 { + if fh.Size > 2_000_000 { return httperr.Message(http.StatusBadRequest, "image exceeds max size of 2MB") } - if err := h.ProjectService.UploadProjectImage(c.Request().Context(), projectID, *fh, h.BlobService.UploadContext); err != nil { + if err := 
h.DBService.ProjectUploadImage(c.Request().Context(), projectID, *fh, h.BlobService); err != nil { return httperr.ServerErrorOrNotFound(err) } diff --git a/api/internal/handler/project_role.go b/api/internal/handler/project_role.go index 2b4ac9fb..5fe27527 100644 --- a/api/internal/handler/project_role.go +++ b/api/internal/handler/project_role.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -17,18 +17,18 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} model.ProjectMembership +// @Success 200 {array} db.ProfileProjectRoleListForProjectRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/members [get] // @Security Bearer func (h *ApiHandler) ListProjectMembers(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - mm, err := h.ProjectRoleService.ListProjectMembers(c.Request().Context(), id) + mm, err := h.DBService.ProfileProjectRoleListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -44,7 +44,7 @@ func (h *ApiHandler) ListProjectMembers(c echo.Context) error { // @Param profile_id path string true "profile uuid" Format(uuid) // @Param role_id path string true "role uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {object} model.ProjectMembership +// @Success 201 {object} db.ProfileProjectRoleGetRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError 
@@ -63,11 +63,14 @@ func (h *ApiHandler) AddProjectMemberRole(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } + grantedBy := c.Get("profile").(db.VProfile) - // profile granting role to profile_id - grantedBy := c.Get("profile").(model.Profile) - - r, err := h.ProjectRoleService.AddProjectMemberRole(c.Request().Context(), projectID, profileID, roleID, grantedBy.ID) + r, err := h.DBService.ProfileProjectRoleCreate(c.Request().Context(), db.ProfileProjectRoleCreateParams{ + ProjectID: projectID, + ProfileID: profileID, + RoleID: roleID, + GrantedBy: &grantedBy.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -104,7 +107,11 @@ func (h *ApiHandler) RemoveProjectMemberRole(c echo.Context) error { return httperr.MalformedID(err) } - if err := h.ProjectRoleService.RemoveProjectMemberRole(c.Request().Context(), projectID, profileID, roleID); err != nil { + if err := h.DBService.ProfileProjectRoleDelete(c.Request().Context(), db.ProfileProjectRoleDeleteParams{ + ProjectID: projectID, + ProfileID: profileID, + RoleID: roleID, + }); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/project_test.go b/api/internal/handler/project_test.go index 0b79f3c2..863efb64 100644 --- a/api/internal/handler/project_test.go +++ b/api/internal/handler/project_test.go @@ -38,16 +38,16 @@ const projectSchema = `{ "district_id": { "type": [ "string", "null"] }, "slug": { "type": "string" }, "name": { "type": "string" }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", 
"format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "instrument_count": { "type": "number" }, "instrument_group_count": { "type": "number" } }, - "required": ["id", "federal_id", "image", "office_id", "slug", "name", "creator_id", "create_date", "updater_id", "update_date", "instrument_count", "instrument_group_count"], + "required": ["id", "federal_id", "image", "office_id", "slug", "name", "created_by", "created_at", "updated_by", "updated_at", "instrument_count", "instrument_group_count"], "additionalProperties": false }` @@ -134,7 +134,7 @@ func TestProjects(t *testing.T) { ExpectedSchema: arrSchema, }, { - Name: "ListProjectsByFederalID", + Name: "ListProjectsForFederalID", URL: fmt.Sprintf("/projects?federal_id=%s", testProjectFederalID), Method: http.MethodGet, ExpectedStatus: http.StatusOK, diff --git a/api/internal/handler/report_config.go b/api/internal/handler/report_config.go index 0c6add4a..e15fc56d 100644 --- a/api/internal/handler/report_config.go +++ b/api/internal/handler/report_config.go @@ -7,8 +7,10 @@ import ( "strings" "time" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/service" "github.com/google/uuid" "github.com/labstack/echo/v4" @@ -22,7 +24,7 @@ import ( // @Param project_id path string true "project uuid" Format(uuid) // @Param key query string false "api key" // @Accept application/json -// @Success 200 {object} model.ReportConfig +// @Success 200 {object} db.VReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -33,7 +35,7 @@ func (h *ApiHandler) 
ListProjectReportConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - rcs, err := h.ReportConfigService.ListProjectReportConfigs(c.Request().Context(), pID) + rcs, err := h.DBService.ReportConfigListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -46,10 +48,10 @@ func (h *ApiHandler) ListProjectReportConfigs(c echo.Context) error { // @Tags report-config // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Param report_config body model.ReportConfig true "report config payload" +// @Param report_config body dto.ReportConfig true "report config payload" // @Param key query string false "api key" // @Accept application/json -// @Success 201 {object} model.ReportConfig +// @Success 201 {object} db.VReportConfig // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -60,17 +62,17 @@ func (h *ApiHandler) CreateReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var rc model.ReportConfig + var rc dto.ReportConfig if err := c.Bind(&rc); err != nil { return httperr.MalformedBody(err) } rc.ProjectID = pID - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - rc.CreatorID, rc.CreateDate = profile.ID, t + rc.CreatedBy, rc.CreatedAt = profile.ID, t - rcNew, err := h.ReportConfigService.CreateReportConfig(c.Request().Context(), rc) + rcNew, err := h.DBService.ReportConfigCreate(c.Request().Context(), rc) if err != nil { return httperr.InternalServerError(err) } @@ -85,7 +87,7 @@ func (h *ApiHandler) CreateReportConfig(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param report_config_id path string true "report config uuid" Format(uuid) -// @Param report_config body model.ReportConfig true "report config payload" +// @Param report_config body 
dto.ReportConfig true "report config payload" // @Param key query string false "api key" // @Accept application/json // @Success 200 {object} map[string]interface{} @@ -103,18 +105,18 @@ func (h *ApiHandler) UpdateReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var rc model.ReportConfig + var rc dto.ReportConfig if err := c.Bind(&rc); err != nil { return httperr.MalformedBody(err) } rc.ID = rcID rc.ProjectID = pID - profile := c.Get("profile").(model.Profile) + profile := c.Get("profile").(db.VProfile) t := time.Now() - rc.UpdaterID, rc.UpdateDate = &profile.ID, &t + rc.UpdatedBy, rc.UpdatedAt = &profile.ID, &t - if err := h.ReportConfigService.UpdateReportConfig(c.Request().Context(), rc); err != nil { + if err := h.DBService.ReportConfigUpdate(c.Request().Context(), rc); err != nil { return httperr.InternalServerError(err) } @@ -140,7 +142,7 @@ func (h *ApiHandler) DeleteReportConfig(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.ReportConfigService.DeleteReportConfig(c.Request().Context(), rcID); err != nil { + if err := h.DBService.ReportConfigDelete(c.Request().Context(), rcID); err != nil { return httperr.InternalServerError(err) } @@ -154,7 +156,7 @@ func (h *ApiHandler) DeleteReportConfig(c echo.Context) error { // @Produce json // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query string true "api key" -// @Success 200 {object} model.ReportConfigWithPlotConfigs +// @Success 200 {object} service.ReportConfigWithPlotConfigs // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -164,7 +166,7 @@ func (h *ApiHandler) GetReportConfigWithPlotConfigs(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - rcs, err := h.ReportConfigService.GetReportConfigWithPlotConfigs(c.Request().Context(), rcID) + rcs, err := 
h.DBService.ReportConfigWithPlotConfigsGet(c.Request().Context(), rcID) if err != nil { return httperr.InternalServerError(err) } @@ -179,7 +181,7 @@ func (h *ApiHandler) GetReportConfigWithPlotConfigs(c echo.Context) error { // @Param report_config_id path string true "report config uuid" Format(uuid) // @Param key query string false "api key" // @Produce application/json -// @Success 201 {object} model.ReportDownloadJob +// @Success 201 {object} db.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -191,9 +193,14 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { return httperr.MalformedID(err) } isLandscape := strings.ToLower(c.QueryParam("is_landscape")) == "true" - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - j, err := h.ReportConfigService.CreateReportDownloadJob(c.Request().Context(), rcID, p.ID, isLandscape) + j, err := h.DBService.ReportDownloadJobCreate(c.Request().Context(), h.PubsubService, service.ReportDownloadJobCreateOpts{ + ReportConfigID: rcID, + ProfileID: p.ID, + IsLandscape: isLandscape, + IsMock: h.Config.AuthJWTMocked, + }) if err != nil { return httperr.InternalServerError(err) } @@ -210,7 +217,7 @@ func (h *ApiHandler) CreateReportDownloadJob(c echo.Context) error { // @Param job_id path string true "download job uuid" Format(uuid) // @Param key query string false "api key" // @Produce application/json -// @Success 200 {object} model.ReportDownloadJob +// @Success 200 {object} db.ReportDownloadJob // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -221,9 +228,12 @@ func (h *ApiHandler) GetReportDownloadJob(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - j, err := 
h.ReportConfigService.GetReportDownloadJob(c.Request().Context(), jobID, p.ID) + j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ + ID: jobID, + CreatedBy: p.ID, + }) if err != nil { return httperr.InternalServerError(err) } @@ -236,7 +246,7 @@ func (h *ApiHandler) GetReportDownloadJob(c echo.Context) error { // @Summary updates a job that creates a pdf report // @Tags report-config // @Param job_id path string true "download job uuid" Format(uuid) -// @Param report_download_job body model.ReportDownloadJob true "report download job payload" +// @Param report_download_job body dto.ReportDownloadJob true "report download job payload" // @Param key query string true "api key" // @Accept application/json // @Produce application/json @@ -250,14 +260,14 @@ func (h *ApiHandler) UpdateReportDownloadJob(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - var j model.ReportDownloadJob + var j dto.ReportDownloadJob if err := c.Bind(&j); err != nil { return httperr.MalformedBody(err) } j.ID = jobID - j.ProgressUpdateDate = time.Now() + j.ProgressUpdatedAt = time.Now() - if err := h.ReportConfigService.UpdateReportDownloadJob(c.Request().Context(), j); err != nil { + if err := h.DBService.ReportDownloadJobUpdate(c.Request().Context(), j); err != nil { return httperr.InternalServerError(err) } @@ -283,9 +293,12 @@ func (h *ApiHandler) DownloadReport(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - p := c.Get("profile").(model.Profile) + p := c.Get("profile").(db.VProfile) - j, err := h.ReportConfigService.GetReportDownloadJob(c.Request().Context(), jobID, p.ID) + j, err := h.DBService.ReportDownloadJobGet(c.Request().Context(), db.ReportDownloadJobGetParams{ + ID: jobID, + CreatedBy: p.ID, + }) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/report_config_test.go b/api/internal/handler/report_config_test.go index 4c6b8676..766ca5fd 
100644 --- a/api/internal/handler/report_config_test.go +++ b/api/internal/handler/report_config_test.go @@ -46,19 +46,19 @@ var reportConfigSchema = fmt.Sprintf(`{ "project_id": { "type": "string" }, "project_name": { "type": "string" }, "district_name": { "type": ["string", "null"] }, - "creator_id": { "type": "string" }, - "creator_username": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "updater_id": { "type": ["string", "null"] }, - "updater_username": { "type": ["string", "null"] }, - "update_date": { "type": ["string", "null"], "format": "date-time" }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string", "format": "date-time" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"], "format": "date-time" }, "global_overrides": %s, "plot_configs": %s }, "additionalProperties": false, "required": [ - "id","slug","name","description","project_id","project_name", "district_name", "creator_id", - "creator_username","create_date","global_overrides","plot_configs" + "id","slug","name","description","project_id","project_name", "district_name", "created_by", + "created_by_username","created_at","global_overrides","plot_configs" ] }`, globalOverridesSchema, IDSlugNameArrSchema) @@ -74,13 +74,13 @@ const reportDownloadJobSchema = `{ "properties": { "id": { "type": "string" }, "report_config_id": { "type": "string" }, - "creator": { "type": "string" }, - "create_date": { "type": "string" }, + "created_by": { "type": "string" }, + "created_at": { "type": "string" }, "status": { "type": "string" }, "file_key": { "type": ["string", "null"] }, "file_expiry": { "type": ["string", "null"] }, "progress": { "type": "number" }, - "progress_update_date": { "type": "string" } + "progress_updated_at": { "type": "string" } } }` diff --git a/api/internal/handler/search.go 
b/api/internal/handler/search.go index ad9aebf7..c4a20335 100644 --- a/api/internal/handler/search.go +++ b/api/internal/handler/search.go @@ -1,50 +1,41 @@ package handler import ( - "context" - "fmt" - + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "net/http" "github.com/labstack/echo/v4" ) -type searchFunc func(ctx context.Context, searchText string, limit int) ([]model.SearchResult, error) - -// Search godoc +// ProjectSearch godoc // // @Summary allows searching using a string on different entities // @Tags search // @Produce json // @Param entity path string true "entity to search (i.e. projects, etc.)" // @Param q query string false "search string" -// @Success 200 {array} model.SearchResult +// @Success 200 {array} db.VProject // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError -// @Router /search/{entity} [get] -func (h *ApiHandler) Search(c echo.Context) error { - var fn searchFunc - pfn := &fn - switch entity := c.Param("entity"); entity { - case "projects": - *pfn = h.ProjectService.SearchProjects - default: - return httperr.Message(http.StatusBadRequest, fmt.Sprintf("search not implemented for entity: %s", entity)) - } - +// @Router /search/projects [get] +func (h *ApiHandler) ProjectSearch(c echo.Context) error { searchText := c.QueryParam("q") if searchText == "" { - return c.JSON(http.StatusOK, make([]model.SearchResult, 0)) + return c.JSON(http.StatusOK, make([]dto.SearchResult, 0)) } // Get Desired Number of Results; Hardcode 5 for now; - limit := 5 - rr, err := fn(c.Request().Context(), searchText, limit) + var limit int32 = 5 + ps, err := h.DBService.ProjectListForNameSearch(c.Request().Context(), db.ProjectListForNameSearchParams{ + Name: &searchText, + ResultLimit: limit, + }) + if err != nil 
{ return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, rr) + return c.JSON(http.StatusOK, ps) } diff --git a/api/internal/handler/submittal.go b/api/internal/handler/submittal.go index 203d888e..d0cd9ac5 100644 --- a/api/internal/handler/submittal.go +++ b/api/internal/handler/submittal.go @@ -4,8 +4,8 @@ import ( "net/http" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -17,24 +17,27 @@ import ( // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param missing query bool false "filter by missing projects only" -// @Success 200 {array} model.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/submittals [get] func (h *ApiHandler) ListProjectSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("project_id")) + pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } - subs, err := h.SubmittalService.ListProjectSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForProject(c.Request().Context(), db.SubmittalListForProjectParams{ + ProjectID: pID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -48,24 +51,27 @@ func (h *ApiHandler) ListProjectSubmittals(c echo.Context) error { // @Produce json // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param missing query bool false 
"filter by missing projects only" -// @Success 200 {array} model.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /instruments/{instrument_id}/submittals [get] func (h *ApiHandler) ListInstrumentSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("instrument_id")) + instID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } - subs, err := h.SubmittalService.ListInstrumentSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForInstrument(c.Request().Context(), db.SubmittalListForInstrumentParams{ + InstrumentID: instID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -78,24 +84,27 @@ func (h *ApiHandler) ListInstrumentSubmittals(c echo.Context) error { // @Tags submittal // @Produce json // @Param alert_config_id path string true "alert config uuid" Format(uuid) -// @Success 200 {array} model.Submittal +// @Success 200 {array} db.VSubmittal // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /alert_configs/{alert_config_id}/submittals [get] func (h *ApiHandler) ListAlertConfigSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("alert_config_id")) + acID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - var fmo bool - mo := c.QueryParam("missing") - if strings.ToLower(mo) == "true" { - fmo = true + var showMissing bool + missingParam := c.QueryParam("missing") + if strings.ToLower(missingParam) == "true" { + showMissing = true } 
- subs, err := h.SubmittalService.ListAlertConfigSubmittals(c.Request().Context(), id, fmo) + subs, err := h.DBService.SubmittalListForAlertConfig(c.Request().Context(), db.SubmittalListForAlertConfigParams{ + AlertConfigID: acID, + ShowIncompleteMissing: showMissing, + }) if err != nil { return httperr.InternalServerError(err) } @@ -116,14 +125,14 @@ func (h *ApiHandler) ListAlertConfigSubmittals(c echo.Context) error { // @Router /submittals/{submittal_id}/verify_missing [put] // @Security Bearer func (h *ApiHandler) VerifyMissingSubmittal(c echo.Context) error { - id, err := uuid.Parse(c.Param("submittal_id")) + subID, err := uuid.Parse(c.Param("submittal_id")) if err != nil { return httperr.MalformedID(err) } - if err := h.SubmittalService.VerifyMissingSubmittal(c.Request().Context(), id); err != nil { + if err := h.DBService.SubmittalUpdateVerifyMissing(c.Request().Context(), subID); err != nil { return httperr.ServerErrorOrNotFound(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"submittal_id": id}) + return c.JSON(http.StatusOK, map[string]interface{}{"submittal_id": subID}) } // VerifyMissingAlertConfigSubmittals godoc @@ -140,12 +149,12 @@ func (h *ApiHandler) VerifyMissingSubmittal(c echo.Context) error { // @Router /alert_configs/{alert_config_id}/submittals/verify_missing [put] // @Security Bearer func (h *ApiHandler) VerifyMissingAlertConfigSubmittals(c echo.Context) error { - id, err := uuid.Parse(c.Param("alert_config_id")) + acID, err := uuid.Parse(c.Param("alert_config_id")) if err != nil { return httperr.MalformedID(err) } - if err := h.SubmittalService.VerifyMissingAlertConfigSubmittals(c.Request().Context(), id); err != nil { + if err := h.DBService.SubmittalUpdateVerifyMissingForAlertConfig(c.Request().Context(), &acID); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, map[string]interface{}{"alert_config_id": id}) + return c.JSON(http.StatusOK, map[string]interface{}{"alert_config_id": acID}) } 
diff --git a/api/internal/handler/submittal_test.go b/api/internal/handler/submittal_test.go index b8c0d58a..03d38072 100644 --- a/api/internal/handler/submittal_test.go +++ b/api/internal/handler/submittal_test.go @@ -20,9 +20,9 @@ const submittalSchema = `{ "project_id": { "type": "string" }, "submittal_status_id": { "type": "string" }, "submittal_status_name": { "type": "string" }, - "create_date": { "type": "string", "format": "date-time" }, - "due_date": { "type": "string", "format": "date-time" }, - "completion_date": { "type": ["string", "null"], "format": "date-time" }, + "created_at": { "type": "string", "format": "date-time" }, + "due_at": { "type": "string", "format": "date-time" }, + "completed_at": { "type": ["string", "null"], "format": "date-time" }, "marked_as_missing": { "type": "boolean" }, "warning_sent": { "type": "boolean" } }, diff --git a/api/internal/handler/timeseries.go b/api/internal/handler/timeseries.go index 55a6c5fe..a5481243 100644 --- a/api/internal/handler/timeseries.go +++ b/api/internal/handler/timeseries.go @@ -1,8 +1,9 @@ package handler import ( + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "net/http" @@ -17,7 +18,7 @@ import ( // @Produce json // @Param timeseries_id path string true "timeseries uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {object} model.Timeseries +// @Success 200 {object} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -28,7 +29,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - t, err := h.TimeseriesService.GetTimeseries(c.Request().Context(), tsID) + t, err := 
h.DBService.TimeseriesGet(c.Request().Context(), tsID) if err != nil { return httperr.InternalServerError(err) } @@ -42,7 +43,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -52,7 +53,7 @@ func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListInstrumentTimeseries(c.Request().Context(), nID) + tt, err := h.DBService.TimeseriesListForInstrument(c.Request().Context(), nID) if err != nil { return httperr.InternalServerError(err) } @@ -65,7 +66,7 @@ func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -75,7 +76,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListInstrumentGroupTimeseries(c.Request().Context(), gID) + tt, err := h.DBService.TimeseriesListForInstrumentGroup(c.Request().Context(), gID) if err != nil { return httperr.InternalServerError(err) } @@ -88,7 +89,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param project_id path string true "project uuid" Format(uuid) -// @Success 200 {array} model.Timeseries +// @Success 200 {array} db.VTimeseries // @Failure 400 
{object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -98,7 +99,7 @@ func (h *ApiHandler) ListProjectTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - tt, err := h.TimeseriesService.ListProjectTimeseries(c.Request().Context(), pID) + tt, err := h.DBService.TimeseriesListForProject(c.Request().Context(), pID) if err != nil { return httperr.InternalServerError(err) } @@ -110,24 +111,23 @@ func (h *ApiHandler) ListProjectTimeseries(c echo.Context) error { // @Summary creates one or more timeseries // @Tags timeseries // @Produce json -// @Param timeseries_collection_items body model.TimeseriesCollectionItems true "timeseries collection items payload" +// @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" // @Param key query string false "api key" -// @Success 200 {array} map[string]uuid.UUID +// @Success 200 {object} map[string]uuid.UUID // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries [post] // @Security Bearer func (h *ApiHandler) CreateTimeseries(c echo.Context) error { - var tc model.TimeseriesCollectionItems + var tc dto.TimeseriesCollectionItems if err := c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } - tt, err := h.TimeseriesService.CreateTimeseriesBatch(c.Request().Context(), tc.Items) - if err != nil { + if err := h.DBService.TimeseriesCreateBatch(c.Request().Context(), tc.Items); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, tt) + return c.NoContent(http.StatusCreated) } // UpdateTimeseries godoc @@ -136,9 +136,9 @@ func (h *ApiHandler) CreateTimeseries(c echo.Context) error { // @Tags timeseries // @Produce json // @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param timeseries body model.Timeseries true "timeseries payload" +// 
@Param timeseries body dto.Timeseries true "timeseries payload" // @Param key query string false "api key" -// @Success 200 {object} map[string]uuid.UUID +// @Success 200 {object} dto.Timeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -149,12 +149,12 @@ func (h *ApiHandler) UpdateTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - t := model.Timeseries{} + var t dto.Timeseries if err := c.Bind(&t); err != nil { return httperr.MalformedBody(err) } t.ID = id - if _, err := h.TimeseriesService.UpdateTimeseries(c.Request().Context(), t); err != nil { + if err := h.DBService.TimeseriesUpdate(c.Request().Context(), t); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, t) @@ -178,8 +178,8 @@ func (h *ApiHandler) DeleteTimeseries(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.TimeseriesService.DeleteTimeseries(c.Request().Context(), id); err != nil { + if err := h.DBService.TimeseriesDelete(c.Request().Context(), id); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, make(map[string]interface{})) + return c.NoContent(http.StatusOK) } diff --git a/api/internal/handler/timeseries_calculated.go b/api/internal/handler/timeseries_calculated.go index 12fa2b1e..201ef948 100644 --- a/api/internal/handler/timeseries_calculated.go +++ b/api/internal/handler/timeseries_calculated.go @@ -6,8 +6,9 @@ import ( "github.com/google/uuid" "github.com/labstack/echo/v4" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" ) // GetInstrumentCalculations godoc @@ -15,7 +16,7 @@ import ( // @Summary lists calculations associated with an instrument // @Tags formula // @Produce json -// 
@Success 200 {array} model.CalculatedTimeseries +// @Success 200 {array} db.TimeseriesComputedListForInstrumentRow // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -29,7 +30,7 @@ func (h *ApiHandler) GetInstrumentCalculations(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - formulas, err := h.CalculatedTimeseriesService.GetAllCalculatedTimeseriesForInstrument(c.Request().Context(), instrumentID) + formulas, err := h.DBService.TimeseriesComputedListForInstrument(c.Request().Context(), &instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -42,14 +43,14 @@ func (h *ApiHandler) GetInstrumentCalculations(c echo.Context) error { // @Tags formula // @Produce json // @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /formulas [post] // @Security Bearer func (h *ApiHandler) CreateCalculation(c echo.Context) error { - var formula model.CalculatedTimeseries + var formula dto.CalculatedTimeseries if err := c.Bind(&formula); err != nil { return httperr.MalformedBody(err) } @@ -58,7 +59,7 @@ func (h *ApiHandler) CreateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.CalculatedTimeseriesService.CreateCalculatedTimeseries(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedCreate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, map[string]interface{}{"id": formula.ID}) @@ -71,7 +72,7 @@ func (h *ApiHandler) CreateCalculation(c echo.Context) error { // @Produce json // @Param formula_id path string true "formula uuid" Format(uuid) // @Param key query string false "api key" -// @Success 200 {array} 
model.CalculatedTimeseries +// @Success 200 {array} dto.CalculatedTimeseries // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -83,7 +84,7 @@ func (h *ApiHandler) UpdateCalculation(c echo.Context) error { return httperr.MalformedID(err) } - var formula model.CalculatedTimeseries + var formula dto.CalculatedTimeseries if err := c.Bind(&formula); err != nil { return httperr.MalformedBody(err) } @@ -93,7 +94,7 @@ func (h *ApiHandler) UpdateCalculation(c echo.Context) error { formula.FormulaName = formula.Formula } - if err := h.CalculatedTimeseriesService.UpdateCalculatedTimeseries(c.Request().Context(), formula); err != nil { + if err := h.DBService.TimeseriesComputedUpdate(c.Request().Context(), formula); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, formula) @@ -117,7 +118,7 @@ func (h *ApiHandler) DeleteCalculation(c echo.Context) error { if err != nil { return httperr.MalformedID(err) } - if err := h.CalculatedTimeseriesService.DeleteCalculatedTimeseries(c.Request().Context(), calculationID); err != nil { + if err := h.DBService.TimeseriesComputedDelete(c.Request().Context(), calculationID); err != nil { return httperr.InternalServerError(err) } return c.JSON(http.StatusOK, make(map[string]interface{})) diff --git a/api/internal/handler/timeseries_cwms.go b/api/internal/handler/timeseries_cwms.go index 19f21b9a..148b5f03 100644 --- a/api/internal/handler/timeseries_cwms.go +++ b/api/internal/handler/timeseries_cwms.go @@ -3,9 +3,9 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -17,7 +17,7 @@ import ( // 
@Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} model.TimeseriesCwms +// @Success 200 {array} db.VTimeseriesCwms // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -32,7 +32,7 @@ func (h *ApiHandler) ListTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - tss, err := h.TimeseriesCwmsService.ListTimeseriesCwms(c.Request().Context(), instrumentID) + tss, err := h.DBService.TimeseriesCwmsList(c.Request().Context(), instrumentID) if err != nil { return httperr.InternalServerError(err) } @@ -47,8 +47,8 @@ func (h *ApiHandler) ListTimeseriesCwms(c echo.Context) error { // @Produce json // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_cwms_arr body []model.TimeseriesCwms true "array of cwms timeseries to create" -// @Success 200 {array} model.TimeseriesCwms +// @Param timeseries_cwms_arr body []dto.TimeseriesCwms true "array of cwms timeseries to create" +// @Success 201 {object} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -63,17 +63,16 @@ func (h *ApiHandler) CreateTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - var tcc []model.TimeseriesCwms + var tcc []dto.TimeseriesCwms if err := c.Bind(&tcc); err != nil { return httperr.MalformedBody(err) } - tss, err := h.TimeseriesCwmsService.CreateTimeseriesCwmsBatch(c.Request().Context(), instrumentID, tcc) - if err != nil { + if err := h.DBService.TimeseriesCwmsCreateBatch(c.Request().Context(), instrumentID, tcc); err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusCreated, tss) + return c.JSON(http.StatusCreated, 
map[string]interface{}{"instrument_id": instrumentID}) } // UpdateTimeseriesCwms godoc @@ -84,8 +83,8 @@ func (h *ApiHandler) CreateTimeseriesCwms(c echo.Context) error { // @Param project_id path string true "project uuid" Format(uuid) // @Param instrument_id path string true "instrument uuid" Format(uuid) // @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param timeseries_cwms body model.TimeseriesCwms true "cwms timeseries to update" -// @Success 200 {array} model.TimeseriesCwms +// @Param timeseries_cwms body dto.TimeseriesCwms true "cwms timeseries to update" +// @Success 200 {array} map[string]interface{} // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -104,14 +103,14 @@ func (h *ApiHandler) UpdateTimeseriesCwms(c echo.Context) error { return httperr.MalformedID(err) } - var tc model.TimeseriesCwms + var tc dto.TimeseriesCwms if err := c.Bind(&tc); err != nil { return httperr.MalformedBody(err) } tc.InstrumentID = instrumentID tc.ID = timeseriesID - if err := h.TimeseriesCwmsService.UpdateTimeseriesCwms(c.Request().Context(), tc); err != nil { + if err := h.DBService.TimeseriesCwmsUpdate(c.Request().Context(), tc); err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/timeseries_cwms_test.go b/api/internal/handler/timeseries_cwms_test.go index fcf3690e..64ff47fb 100644 --- a/api/internal/handler/timeseries_cwms_test.go +++ b/api/internal/handler/timeseries_cwms_test.go @@ -99,7 +99,6 @@ func TestTimeseriesCwms(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesCwmsArrayBody, ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, }, { Name: "UpdateTimeseries", diff --git a/api/internal/handler/timeseries_process.go b/api/internal/handler/timeseries_process.go index 1819689d..d6c937ae 100644 --- a/api/internal/handler/timeseries_process.go +++ b/api/internal/handler/timeseries_process.go @@ -5,8 +5,9 @@ 
import ( "strconv" "time" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" "github.com/labstack/echo/v4" ) @@ -20,7 +21,7 @@ const ( explorerRequest ) -// ListTimeseriesMeasurementsByTimeseries godoc +// ListTimeseriesMeasurementsForTimeseries godoc // // @Summary lists timeseries by timeseries uuid // @Tags timeseries @@ -30,25 +31,25 @@ const ( // @Param after query string false "after time" Format(date-time) // @param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {array} db.MeasurementCollection // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /timeseries/{timeseries_id}/measurements [get] // @Router /instruments/{instrument_id}/timeseries/{timeseries_id}/measurements [get] -func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) error { +func (h *ApiHandler) ListTimeseriesMeasurementsForTimeseries(c echo.Context) error { tsID, err := uuid.Parse(c.Param("timeseries_id")) if err != nil { return httperr.MalformedID(err) } - isStored, err := h.TimeseriesService.GetStoredTimeseriesExists(c.Request().Context(), tsID) + isStored, err := h.DBService.TimeseriesGetExistsStored(c.Request().Context(), tsID) if err != nil { return httperr.InternalServerError(err) } if isStored { - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -65,14 +66,19 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro threshold = tr } - 
resBody, err := h.MeasurementService.ListTimeseriesMeasurements(c.Request().Context(), tsID, tw, threshold) + mc, err := h.DBService.TimeseriesMeasurementCollectionGetForRange(c.Request().Context(), db.TimeseriesMeasurementCollectionGetForRangeParams{ + TimeseriesID: tsID, + After: tw.After, + Before: tw.Before, + Threshold: threshold, + }) if err != nil { return httperr.InternalServerError(err) } - return c.JSON(http.StatusOK, resBody) + return c.JSON(http.StatusOK, mc) } - f := model.ProcessMeasurementFilter{TimeseriesID: &tsID} + f := db.ProcessMeasurementFilter{TimeseriesID: &tsID} selectMeasurements := selectMeasurementsHandler(h, f, byTimeseriesRequest) return selectMeasurements(c) @@ -87,7 +93,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByTimeseries(c echo.Context) erro // @Param after query string false "after time" Format(date-time) // @Param before query string false "before time" Format(date-time) // @Param threshold query number false "downsample threshold" -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {object} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -97,7 +103,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrument(c echo.Context) erro if err != nil { return httperr.MalformedID(err) } - f := model.ProcessMeasurementFilter{InstrumentID: &iID} + f := db.ProcessMeasurementFilter{InstrumentID: &iID} selectMeasurements := selectMeasurementsHandler(h, f, byInstrumentRequest) return selectMeasurements(c) @@ -109,7 +115,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrument(c echo.Context) erro // @Tags timeseries // @Produce json // @Param instrument_group_id path string true "instrument group uuid" Format(uuid) -// @Success 200 {object} model.MeasurementCollection +// @Success 200 {object} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // 
@Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -119,7 +125,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrumentGroup(c echo.Context) if err != nil { return httperr.MalformedID(err) } - f := model.ProcessMeasurementFilter{InstrumentGroupID: &igID} + f := db.ProcessMeasurementFilter{InstrumentGroupID: &igID} selectMeasurements := selectMeasurementsHandler(h, f, byInstrumentGroupRequest) return selectMeasurements(c) @@ -132,7 +138,7 @@ func (h *ApiHandler) ListTimeseriesMeasurementsByInstrumentGroup(c echo.Context) // @Accept json // @Produce json // @Param instrument_ids body []uuid.UUID true "array of instrument uuids" -// @Success 200 {array} map[uuid.UUID]model.MeasurementCollectionLean +// @Success 200 {array} map[uuid.UUID][]db.MeasurementCollectionLean // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError @@ -142,38 +148,15 @@ func (h *ApiHandler) ListTimeseriesMeasurementsExplorer(c echo.Context) error { if err := (&echo.DefaultBinder{}).BindBody(c, &iIDs); err != nil { return httperr.MalformedBody(err) } - f := model.ProcessMeasurementFilter{InstrumentIDs: iIDs} + f := db.ProcessMeasurementFilter{InstrumentIDs: iIDs} selectMeasurements := selectMeasurementsHandler(h, f, explorerRequest) return selectMeasurements(c) } -// ListInclinometerTimeseriesMeasurementsExplorer godoc -// -// @Summary list inclinometer timeseries measurements for explorer page -// @Tags explorer -// @Accept json -// @Produce json -// @Param instrument_ids body []uuid.UUID true "array of inclinometer instrument uuids" -// @Success 200 {array} map[uuid.UUID]model.InclinometerMeasurementCollectionLean -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /inclinometer_explorer [post] -func (h *ApiHandler) ListInclinometerTimeseriesMeasurementsExplorer(c echo.Context) error { - var iIDs []uuid.UUID - if 
err := (&echo.DefaultBinder{}).BindBody(c, &iIDs); err != nil { - return httperr.MalformedBody(err) - } - f := model.ProcessMeasurementFilter{InstrumentIDs: iIDs} - - selectMeasurements := selectInclinometerMeasurementsHandler(h, f) - return selectMeasurements(c) -} - -func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, requestType processTimeseriesType) echo.HandlerFunc { +func selectMeasurementsHandler(h *ApiHandler, f db.ProcessMeasurementFilter, requestType processTimeseriesType) echo.HandlerFunc { return func(c echo.Context) error { - var tw model.TimeWindow + var tw util.TimeWindow a, b := c.QueryParam("after"), c.QueryParam("before") if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { return httperr.MalformedDate(err) @@ -193,7 +176,7 @@ func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, threshold = tr } - mrc, err := h.ProcessTimeseriesService.SelectMeasurements(c.Request().Context(), f) + mrc, err := h.DBService.ProcessMeasurementListDynamic(c.Request().Context(), f) if err != nil { return httperr.InternalServerError(err) } @@ -214,25 +197,3 @@ func selectMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter, } } } - -func selectInclinometerMeasurementsHandler(h *ApiHandler, f model.ProcessMeasurementFilter) echo.HandlerFunc { - return func(c echo.Context) error { - var tw model.TimeWindow - a, b := c.QueryParam("after"), c.QueryParam("before") - if err := tw.SetWindow(a, b, time.Now().AddDate(0, 0, -7), time.Now()); err != nil { - return httperr.MalformedDate(err) - } - - f.After = tw.After - f.Before = tw.Before - - mrc, err := h.ProcessTimeseriesService.SelectInclinometerMeasurements(c.Request().Context(), f) - if err != nil { - return httperr.InternalServerError(err) - } - - resBody, err := mrc.GroupByInstrument() - - return c.JSON(http.StatusOK, resBody) - } -} diff --git a/api/internal/handler/timeseries_test.go 
b/api/internal/handler/timeseries_test.go index 3b4bb791..7d3bbca9 100644 --- a/api/internal/handler/timeseries_test.go +++ b/api/internal/handler/timeseries_test.go @@ -26,7 +26,7 @@ const timeseriesSchema = `{ "is_computed": { "type": "boolean" }, "type": { "type": "string" } }, - "required": ["id", "slug", "name", "variable", "instrument_id", "parameter_id", "unit_id", "is_computed", "type"], + "required": ["id", "slug", "name", "instrument_id", "parameter_id", "unit_id", "type"], "additionalProperties": false }` @@ -113,7 +113,6 @@ func TestTimeseries(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesObjectBody, ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, }, { Name: "CreateTimeseries_Array", @@ -121,7 +120,6 @@ func TestTimeseries(t *testing.T) { Method: http.MethodPost, Body: createTimeseriesArrayBody, ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, }, { Name: "UpdateTimeseries", diff --git a/api/internal/handler/unit.go b/api/internal/handler/unit.go index 27028bee..f2ca57b0 100644 --- a/api/internal/handler/unit.go +++ b/api/internal/handler/unit.go @@ -3,8 +3,8 @@ package handler import ( "net/http" + _ "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/httperr" - _ "github.com/USACE/instrumentation-api/api/internal/model" "github.com/labstack/echo/v4" ) @@ -13,11 +13,11 @@ import ( // @Summary lists the available units // @Tags unit // @Produce json -// @Success 200 {array} model.Unit +// @Success 200 {array} db.VUnit // @Failure 400 {object} echo.HTTPError // @Router /units [get] func (h *ApiHandler) ListUnits(c echo.Context) error { - uu, err := h.UnitService.ListUnits(c.Request().Context()) + uu, err := h.DBService.UnitsList(c.Request().Context()) if err != nil { return httperr.InternalServerError(err) } diff --git a/api/internal/handler/uploader.go b/api/internal/handler/uploader.go new file mode 100644 index 00000000..b7668072 --- /dev/null +++ 
b/api/internal/handler/uploader.go @@ -0,0 +1,278 @@ +package handler + +import ( + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + _ "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +// ListUploaderConfigsForProject godoc +// +// @Summary lists uploader configs for a project +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Success 200 {array} db.VUploaderConfig +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs [get] +func (h *ApiHandler) ListUploaderConfigsForProject(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + uu, err := h.DBService.UploaderConfigListForProject(c.Request().Context(), projectID) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, uu) +} + +// ListUploaderConfigMappings godoc +// +// @Summary lists timeseries mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {array} db.UploaderConfigMapping +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [get] +func (h *ApiHandler) ListUploaderConfigMappings(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + mm, err := h.DBService.UploaderConfigMappingList(c.Request().Context(), ucID) + if err != nil { + return 
httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, mm) +} + +// CreateUploaderConfig godoc +// +// @Summary creates an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config body dto.UploaderConfig true "uploader config payload" +// @Success 201 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs [post] +func (h *ApiHandler) CreateUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + var uc dto.UploaderConfig + if err := c.Bind(&uc); err != nil { + return httperr.MalformedBody(err) + } + + profile := c.Get("profile").(db.VProfile) + + uc.CreatedBy = profile.ID + uc.CreatedAt = time.Now() + uc.ProjectID = projectID + + newID, err := h.DBService.UploaderConfigCreate(c.Request().Context(), uc) + if err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, map[string]interface{}{"id": newID}) +} + +// UpdateUploaderConfig godoc +// +// @Summary updates an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config body dto.UploaderConfig true "uploader config payload" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id} [put] +func (h *ApiHandler) UpdateUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + var uc dto.UploaderConfig + if err := c.Bind(&uc); err != nil { + return 
httperr.MalformedBody(err) + } + + profile := c.Get("profile").(db.VProfile) + + t := time.Now() + uc.UpdatedBy = &profile.ID + uc.UpdatedAt = &t + uc.ProjectID = projectID + uc.ID = ucID + + if err := h.DBService.UploaderConfigUpdate(c.Request().Context(), uc); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// DeleteUploaderConfig godoc +// +// @Summary deletes an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id} [delete] +func (h *ApiHandler) DeleteUploaderConfig(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + if err := h.DBService.UploaderConfigDelete(c.Request().Context(), ucID); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// CreateUploaderConfigMappings godoc +// +// @Summary creates mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config_mappings body []dto.UploaderConfigMapping true "uploader config mappings payload" +// @Success 201 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [post] +func (h *ApiHandler) CreateUploaderConfigMappings(c echo.Context) error { + _, err := 
uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + mm := make([]dto.UploaderConfigMapping, 0) + if err := c.Bind(&mm); err != nil { + return httperr.MalformedBody(err) + } + if err := h.DBService.UploaderConfigMappingCreateBatch(c.Request().Context(), ucID, mm); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusCreated, map[string]interface{}{"id": ucID}) +} + +// UpdateUploaderConfigMappings godoc +// +// @Summary updates mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Param uploader_config_mappings body []dto.UploaderConfigMapping true "uploader config mappings payload" +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [put] +func (h *ApiHandler) UpdateUploaderConfigMappings(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + mm := make([]dto.UploaderConfigMapping, 0) + if err := c.Bind(&mm); err != nil { + return httperr.MalformedBody(err) + } + if err := h.DBService.UploaderConfigMappingUpdateBatch(c.Request().Context(), ucID, mm); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// DeleteAllUploaderConfigMappingsForUploaderConfig godoc +// +// @Summary updates mappings for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param 
uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 200 {object} map[string]interface{} +// @Failure 400 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [delete] +func (h *ApiHandler) DeleteAllUploaderConfigMappingsForUploaderConfig(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + if err := h.DBService.UploaderConfigMappingDeleteForUploaderConfig(c.Request().Context(), ucID); err != nil { + return httperr.InternalServerError(err) + } + return c.JSON(http.StatusOK, map[string]interface{}{"id": ucID}) +} + +// UploadFileForUploaderConfig godoc +// +// @Summary uploads a file for an uploader config +// @Tags uploader +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param uploader_config_id path string true "uploader config uuid" Format(uuid) +// @Success 201 created +// @Failure 400 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings [delete] +func (h *ApiHandler) UploadFileForUploaderConfig(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + ucID, err := uuid.Parse(c.Param("uploader_config_id")) + if err != nil { + return httperr.MalformedID(err) + } + file, err := c.FormFile("file") + if err != nil { + return err + } + src, err := file.Open() + if err != nil { + return err + } + defer src.Close() + if err := h.DBService.UploaderConfigUploadFile(c.Request().Context(), projectID, ucID, src); err != nil { + return httperr.InternalServerError(err) + } + return c.NoContent(http.StatusCreated) +} diff --git 
a/api/internal/middleware/audit.go b/api/internal/middleware/audit.go index f7edc46c..8d85e117 100644 --- a/api/internal/middleware/audit.go +++ b/api/internal/middleware/audit.go @@ -5,30 +5,31 @@ import ( "strconv" "strings" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" "github.com/labstack/echo/v4" ) -func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { +func mapClaims(user *jwt.Token) (dto.ProfileClaims, error) { claims, ok := user.Claims.(jwt.MapClaims) if !ok { - return model.ProfileClaims{}, errors.New("unable to map claims") + return dto.ProfileClaims{}, errors.New("unable to map claims") } preferredUsername, ok := claims["preferred_username"].(string) if !ok || preferredUsername == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: email") + return dto.ProfileClaims{}, errors.New("error parsing token claims: email") } email, ok := claims["email"].(string) if !ok || email == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: email") + return dto.ProfileClaims{}, errors.New("error parsing token claims: email") } name, ok := claims["name"].(string) if !ok || name == "" { - return model.ProfileClaims{}, errors.New("error parsing token claims: name") + return dto.ProfileClaims{}, errors.New("error parsing token claims: name") } dnClaim, exists := claims["subjectDN"] @@ -36,7 +37,7 @@ func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { if exists && dnClaim != nil { dnStr, ok := dnClaim.(string) if !ok { - return model.ProfileClaims{}, errors.New("error parsing token claims: subjectDN") + return dto.ProfileClaims{}, errors.New("error parsing token claims: subjectDN") } subjectDN = &dnStr } @@ -46,7 +47,7 @@ func mapClaims(user *jwt.Token) 
(model.ProfileClaims, error) { if exists && cacUIDClaim != nil { cacUIDClaims, err := strconv.Atoi(cacUIDClaim.(string)) if err != nil { - return model.ProfileClaims{}, errors.New("error parsing token claims: cacUID") + return dto.ProfileClaims{}, errors.New("error parsing token claims: cacUID") } cacUID = &cacUIDClaims } @@ -57,7 +58,7 @@ func mapClaims(user *jwt.Token) (model.ProfileClaims, error) { x509Presented = true } - return model.ProfileClaims{ + return dto.ProfileClaims{ PreferredUsername: preferredUsername, Name: name, Email: email, @@ -96,7 +97,7 @@ func (m *mw) AttachClaims(next echo.HandlerFunc) echo.HandlerFunc { func (m *mw) RequireClaims(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - _, ok := c.Get("claims").(model.ProfileClaims) + _, ok := c.Get("claims").(dto.ProfileClaims) if !ok { return httperr.Forbidden(errors.New("no valid claims for user")) } @@ -112,7 +113,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // lookup superuser profile; the "EDIPI" of the Superuser is consistently 79. 
// The superuser is initialized as part of database and seed data initialization if c.Get("ApplicationKeyAuthSuccess") == true { - p, err := m.ProfileService.GetProfileWithTokensForEDIPI(ctx, 79) + p, err := m.DBService.ProfileGetForEDIPI(ctx, 79) if err != nil { return httperr.Forbidden(err) } @@ -123,7 +124,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // If a User was authenticated via KeyAuth, lookup the user's profile using key_id if c.Get("KeyAuthSuccess") == true { keyID := c.Get("KeyAuthKeyID").(string) - p, err := m.ProfileService.GetProfileWithTokensForTokenID(ctx, keyID) + p, err := m.DBService.ProfileGetForToken(ctx, keyID) if err != nil { return httperr.Forbidden(err) } @@ -131,12 +132,12 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { return next(c) } - claims, ok := c.Get("claims").(model.ProfileClaims) + claims, ok := c.Get("claims").(dto.ProfileClaims) if !ok { return httperr.Forbidden(errors.New("could not bind claims from context")) } - p, err := m.ProfileService.GetProfileWithTokensForClaims(ctx, claims) + p, err := m.DBService.ProfileGetWithTokensForClaims(ctx, claims) if err != nil { return httperr.Forbidden(err) } @@ -149,7 +150,7 @@ func (m *mw) AttachProfile(next echo.HandlerFunc) echo.HandlerFunc { // IsApplicationAdmin checks that a profile is an application admin func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -164,7 +165,7 @@ func (m *mw) IsApplicationAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := 
c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -175,7 +176,10 @@ func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { if err != nil { return httperr.MalformedID(err) } - authorized, err := m.ProjectRoleService.IsProjectAdmin(c.Request().Context(), p.ID, projectID) + authorized, err := m.DBService.ProfileProjectRoleGetIsAdmin(c.Request().Context(), db.ProfileProjectRoleGetIsAdminParams{ + ProfileID: p.ID, + ProjectID: projectID, + }) if err != nil || !authorized { return httperr.ForbiddenRole(err) } @@ -187,7 +191,7 @@ func (m *mw) IsProjectAdmin(next echo.HandlerFunc) echo.HandlerFunc { // ApplicationAdmin has automatic member/admin status for all projects func (m *mw) IsProjectMember(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { - p, ok := c.Get("profile").(model.Profile) + p, ok := c.Get("profile").(db.VProfile) if !ok { return httperr.Unauthorized(errors.New("could not bind profile from context")) } @@ -198,7 +202,10 @@ func (m *mw) IsProjectMember(next echo.HandlerFunc) echo.HandlerFunc { if err != nil { return httperr.MalformedID(err) } - authorized, err := m.ProjectRoleService.IsProjectMember(c.Request().Context(), p.ID, projectID) + authorized, err := m.DBService.ProfileProjectRoleGetIsMemberOrAdmin(c.Request().Context(), db.ProfileProjectRoleGetIsMemberOrAdminParams{ + ProfileID: p.ID, + ProjectID: projectID, + }) if err != nil || !authorized { return httperr.ForbiddenRole(err) } diff --git a/api/internal/middleware/key.go b/api/internal/middleware/key.go index 565cda6c..a2f95a0d 100644 --- a/api/internal/middleware/key.go +++ b/api/internal/middleware/key.go @@ -3,6 +3,7 @@ package middleware import ( "context" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/password" "github.com/labstack/echo/v4" @@ -73,7 +74,7 @@ func keyAuth(isDisabled bool, appKey string, h 
HashExtractorFunc) echo.Middlewar func getHashExtractorFunc(ctx context.Context, m *mw) HashExtractorFunc { return func(keyID string) (string, error) { - k, err := m.ProfileService.GetTokenInfoByTokenID(ctx, keyID) + k, err := m.DBService.ProfileTokenGet(ctx, keyID) if err != nil { return "", err } @@ -90,7 +91,10 @@ type DataloggerHashExtractorFunc func(modelName, sn string) (string, error) func getDataloggerHashExtractorFunc(ctx context.Context, m *mw) DataloggerHashExtractorFunc { return func(modelName, sn string) (string, error) { - hash, err := m.DataloggerTelemetryService.GetDataloggerHashByModelSN(ctx, modelName, sn) + hash, err := m.DBService.DataloggerHashGetForModelSn(ctx, db.DataloggerHashGetForModelSnParams{ + Model: &modelName, + Sn: sn, + }) if err != nil { return "", err } diff --git a/api/internal/middleware/middleware.go b/api/internal/middleware/middleware.go index 7eaaf21c..4e8c7ffa 100644 --- a/api/internal/middleware/middleware.go +++ b/api/internal/middleware/middleware.go @@ -26,14 +26,12 @@ type Middleware interface { } type mw struct { - cfg *config.ServerConfig - ProfileService service.ProfileService - ProjectRoleService service.ProjectRoleService - DataloggerTelemetryService service.DataloggerTelemetryService + cfg *config.ServerConfig + DBService *service.DBService } var _ Middleware = (*mw)(nil) -func NewMiddleware(cfg *config.ServerConfig, profileService service.ProfileService, projectRoleService service.ProjectRoleService, dataloggerTelemetryService service.DataloggerTelemetryService) *mw { - return &mw{cfg, profileService, projectRoleService, dataloggerTelemetryService} +func NewMiddleware(cfg *config.ServerConfig, db *service.DBService) *mw { + return &mw{cfg, db} } diff --git a/api/internal/model/alert.go b/api/internal/model/alert.go deleted file mode 100644 index 382494f1..00000000 --- a/api/internal/model/alert.go +++ /dev/null @@ -1,115 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - 
-// Alert is an alert, triggered by an AlertConfig evaluating to true -type Alert struct { - Read *bool `json:"read,omitempty"` - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - Name string `json:"name"` - Body string `json:"body"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Instruments dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` -} - -const createAlerts = ` - INSERT INTO alert (alert_config_id) VALUES ($1) -` - -// CreateAlerts creates one or more new alerts -func (q *Queries) CreateAlerts(ctx context.Context, id uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createAlerts, id) - return err -} - -const getAllAlertsForProject = ` - SELECT * FROM v_alert WHERE project_id = $1 -` - -// GetAllAlertsForProject lists all alerts for a given instrument ID -func (q *Queries) GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForProject, projectID); err != nil { - return nil, err - } - return aa, nil -} - -const getAllAlertsForInstrument = ` - SELECT * FROM v_alert - WHERE alert_config_id = ANY( - SELECT id FROM alert_config_instrument - WHERE instrument_id = $1 - ) -` - -// GetAllAlertsForInstrument lists all alerts for a given instrument ID -func (q *Queries) GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForInstrument, instrumentID); err != nil { - return nil, err - } - return aa, nil -} - -const getAllAlertsForProfile = ` - SELECT a.*, - CASE WHEN r.alert_id IS NOT NULL THEN true ELSE false - END AS read - FROM v_alert a - LEFT JOIN alert_read r ON r.alert_id = a.id - WHERE a.alert_config_id IN ( - SELECT alert_config_id - 
FROM alert_profile_subscription - WHERE profile_id = $1 - ) -` - -// GetAllAlertsForProfile returns all alerts for which a profile is subscribed to the AlertConfig -func (q *Queries) GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]Alert, error) { - aa := make([]Alert, 0) - if err := q.db.SelectContext(ctx, &aa, getAllAlertsForProfile, profileID); err != nil { - return nil, err - } - return aa, nil -} - -const getOneAlertForProfile = getAllAlertsForProfile + ` - AND a.id = $2 -` - -// GetOneAlertForProfile returns a single alert for which a profile is subscribed -func (q *Queries) GetOneAlertForProfile(ctx context.Context, profileID, alertID uuid.UUID) (Alert, error) { - var a Alert - err := q.db.GetContext(ctx, &a, getOneAlertForProfile, profileID, alertID) - return a, err -} - -const doAlertRead = ` - INSERT INTO alert_read (profile_id, alert_id) VALUES ($1, $2) - ON CONFLICT DO NOTHING -` - -// DoAlertRead marks an alert as read for a profile -func (q *Queries) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, doAlertRead, profileID, alertID) - return err -} - -const doAlertUnread = ` - DELETE FROM alert_read WHERE profile_id = $1 AND alert_id = $2 -` - -// DoAlertUnread marks an alert as unread for a profile -func (q *Queries) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, doAlertUnread, profileID, alertID) - return err -} diff --git a/api/internal/model/alert_check.go b/api/internal/model/alert_check.go deleted file mode 100644 index 6b1c9bcb..00000000 --- a/api/internal/model/alert_check.go +++ /dev/null @@ -1,118 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - - "github.com/google/uuid" -) - -var ( - GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") - YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") - 
RedSubmittalStatusID uuid.UUID = uuid.MustParse("84a0f437-a20a-4ac2-8a5b-f8dc35e8489b") - - MeasurementSubmittalAlertTypeID uuid.UUID = uuid.MustParse("97e7a25c-d5c7-4ded-b272-1bb6e5914fe3") - EvaluationSubmittalAlertTypeID uuid.UUID = uuid.MustParse("da6ee89e-58cc-4d85-8384-43c3c33a68bd") -) - -const ( - warning = "Warning" - alert = "Alert" - reminder = "Reminder" -) - -type AlertCheck struct { - AlertConfigID uuid.UUID `db:"alert_config_id"` - SubmittalID uuid.UUID `db:"submittal_id"` - ShouldWarn bool `db:"should_warn"` - ShouldAlert bool `db:"should_alert"` - ShouldRemind bool `db:"should_remind"` - Submittal Submittal `db:"-"` -} - -func (ck AlertCheck) GetShouldWarn() bool { - return ck.ShouldWarn -} - -func (ck AlertCheck) GetShouldAlert() bool { - return ck.ShouldAlert -} - -func (ck AlertCheck) GetShouldRemind() bool { - return ck.ShouldRemind -} - -func (ck AlertCheck) GetSubmittal() Submittal { - return ck.Submittal -} - -func (ck *AlertCheck) SetSubmittal(sub Submittal) { - ck.Submittal = sub -} - -type AlertConfigMap map[uuid.UUID]AlertConfig - -type SubmittalMap map[uuid.UUID]Submittal - -const listAndCheckAlertConfigs = ` - UPDATE alert_config ac1 - SET last_checked = now() - FROM ( - SELECT * - FROM v_alert_config - ) ac2 - WHERE ac1.id = ac2.id - RETURNING ac2.* -` - -func (q *Queries) ListAndCheckAlertConfigs(ctx context.Context) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - if err := q.db.SelectContext(ctx, &aa, listAndCheckAlertConfigs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return make([]AlertConfig, 0), nil - } - return nil, err - } - return aa, nil -} - -const updateAlertConfigLastReminded = ` - UPDATE alert_config SET - last_reminded = $2 - WHERE id = $1 -` - -func (q *Queries) UpdateAlertConfigLastReminded(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, updateAlertConfigLastReminded, ac.ID, ac.LastReminded) - return err -} - -const updateSubmittalCompletionDateOrWarningSent = ` - 
UPDATE submittal SET - submittal_status_id = $2, - completion_date = $3, - warning_sent = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateSubmittalCompletionDateOrWarningSent(ctx context.Context, sub Submittal) error { - _, err := q.db.ExecContext(ctx, updateSubmittalCompletionDateOrWarningSent, sub.ID, sub.SubmittalStatusID, sub.CompletionDate, sub.WarningSent) - return err -} - -const createNextSubmittalFromNewAlertConfigDate = ` - INSERT INTO submittal (alert_config_id, create_date, due_date) - SELECT - ac.id, - $2::TIMESTAMPTZ, - $2::TIMESTAMPTZ + ac.schedule_interval - FROM alert_config ac - WHERE ac.id = $1 -` - -func (q *Queries) CreateNextSubmittalFromNewAlertConfigDate(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, createNextSubmittalFromNewAlertConfigDate, ac.ID, ac.CreateNextSubmittalFrom) - return err -} diff --git a/api/internal/model/alert_config.go b/api/internal/model/alert_config.go deleted file mode 100644 index ed44180f..00000000 --- a/api/internal/model/alert_config.go +++ /dev/null @@ -1,239 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - "fmt" - "time" - - "github.com/google/uuid" -) - -type AlertConfig struct { - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - Body string `json:"body" db:"body"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertType string `json:"alert_type" db:"alert_type"` - StartDate time.Time `json:"start_date" db:"start_date"` - ScheduleInterval string `json:"schedule_interval" db:"schedule_interval"` - RemindInterval string `json:"remind_interval" db:"remind_interval"` - WarningInterval string `json:"warning_interval" db:"warning_interval"` - LastChecked *time.Time `json:"last_checked" db:"last_checked"` - LastReminded *time.Time `json:"last_reminded" db:"last_reminded"` - Instruments 
dbJSONSlice[AlertConfigInstrument] `json:"instruments" db:"instruments"` - AlertEmailSubscriptions dbJSONSlice[EmailAutocompleteResult] `json:"alert_email_subscriptions" db:"alert_email_subscriptions"` - MuteConsecutiveAlerts bool `json:"mute_consecutive_alerts" db:"mute_consecutive_alerts"` - CreateNextSubmittalFrom *time.Time `json:"-" db:"-"` - AuditInfo -} - -type AlertConfigInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` -} - -func (a *AlertConfig) GetToAddresses() []string { - emails := make([]string, len(a.AlertEmailSubscriptions)) - for idx := range a.AlertEmailSubscriptions { - emails[idx] = a.AlertEmailSubscriptions[idx].Email - } - return emails -} - -const getAllAlertConfigsForProject = ` - SELECT * - FROM v_alert_config - WHERE project_id = $1 - ORDER BY name -` - -// GetAllAlertConfigsForProject lists all alert configs for a single project -func (q *Queries) GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, getAllAlertConfigsForProject, projectID) - return aa, err -} - -const qetAllAlertConfigsForProjectAndAlertType = ` - SELECT * - FROM v_alert_config - WHERE project_id = $1 - AND alert_type_id = $2 - ORDER BY name -` - -// GetAllAlertConfigsForProjectAndAlertType lists alert configs for a single project filetered by alert type -func (q *Queries) GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, qetAllAlertConfigsForProjectAndAlertType, projectID, alertTypeID) - return aa, err -} - -const getAllAlertConfigsForInstrument = ` - SELECT * - FROM v_alert_config - WHERE id = ANY( - SELECT alert_config_id - FROM alert_config_instrument - WHERE instrument_id = $1 - ) - ORDER BY name -` - -// 
GetAllAlertConfigsForInstrument lists all alerts for a single instrument -func (q *Queries) GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]AlertConfig, error) { - aa := make([]AlertConfig, 0) - err := q.db.SelectContext(ctx, &aa, getAllAlertConfigsForInstrument, instrumentID) - return aa, err -} - -const getOneAlertConfig = ` - SELECT * FROM v_alert_config WHERE id = $1 -` - -// GetOneAlertConfig gets a single alert -func (q *Queries) GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (AlertConfig, error) { - var a AlertConfig - err := q.db.GetContext(ctx, &a, getOneAlertConfig, alertConfigID) - return a, err -} - -const createAlertConfig = ` - INSERT INTO alert_config ( - project_id, - name, - body, - alert_type_id, - start_date, - schedule_interval, - mute_consecutive_alerts, - remind_interval, - warning_interval, - creator, - create_date - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) - RETURNING id -` - -func (q *Queries) CreateAlertConfig(ctx context.Context, ac AlertConfig) (uuid.UUID, error) { - var alertConfigID uuid.UUID - err := q.db.GetContext(ctx, &alertConfigID, createAlertConfig, - ac.ProjectID, - ac.Name, - ac.Body, - ac.AlertTypeID, - ac.StartDate, - ac.ScheduleInterval, - ac.MuteConsecutiveAlerts, - ac.RemindInterval, - ac.WarningInterval, - ac.CreatorID, - ac.CreateDate, - ) - return alertConfigID, err -} - -const assignInstrumentToAlertConfig = ` - INSERT INTO alert_config_instrument (alert_config_id, instrument_id) VALUES ($1, $2) -` - -func (q *Queries) AssignInstrumentToAlertConfig(ctx context.Context, alertConfigID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignInstrumentToAlertConfig, alertConfigID, instrumentID) - return err -} - -const unassignAllInstrumentsFromAlertConfig = ` - DELETE FROM alert_config_instrument WHERE alert_config_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := 
q.db.ExecContext(ctx, unassignAllInstrumentsFromAlertConfig, alertConfigID) - return err -} - -const createNextSubmittalFromExistingAlertConfigDate = ` - INSERT INTO submittal (alert_config_id, due_date) - SELECT id, create_date + schedule_interval - FROM alert_config - WHERE id = $1 -` - -func (q *Queries) CreateNextSubmittalFromExistingAlertConfigDate(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createNextSubmittalFromExistingAlertConfigDate, alertConfigID) - return err -} - -const updateAlertConfig = ` - UPDATE alert_config SET - name = $3, - body = $4, - start_date = $5, - schedule_interval = $6, - mute_consecutive_alerts = $7, - remind_interval = $8, - warning_interval = $9, - updater = $10, - update_date = $11 - WHERE id = $1 AND project_id = $2 -` - -func (q *Queries) UpdateAlertConfig(ctx context.Context, ac AlertConfig) error { - _, err := q.db.ExecContext(ctx, updateAlertConfig, - ac.ID, - ac.ProjectID, - ac.Name, - ac.Body, - ac.StartDate, - ac.ScheduleInterval, - ac.MuteConsecutiveAlerts, - ac.RemindInterval, - ac.WarningInterval, - ac.UpdaterID, - ac.UpdateDate, - ) - return err -} - -const updateFutureSubmittalForAlertConfig = ` - UPDATE submittal - SET due_date = sq.new_due_date - FROM ( - SELECT - sub.id AS submittal_id, - sub.create_date + ac.schedule_interval AS new_due_date - FROM submittal sub - INNER JOIN alert_config ac ON sub.alert_config_id = ac.id - WHERE sub.alert_config_id = $1 - AND sub.due_date > NOW() - AND sub.completion_date IS NULL - AND NOT sub.marked_as_missing - ) sq - WHERE id = sq.submittal_id - AND sq.new_due_date > NOW() - RETURNING id -` - -func (q *Queries) UpdateFutureSubmittalForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - var updatedSubID uuid.UUID - if err := q.db.GetContext(ctx, &updatedSubID, updateFutureSubmittalForAlertConfig, alertConfigID); err != nil { - if errors.Is(err, sql.ErrNoRows) { - return fmt.Errorf("updated alert config new due date must 
be in the future! complete the current submittal before updating") - } - return err - } - return nil -} - -const deleteAlertConfig = ` - UPDATE alert_config SET deleted=true WHERE id = $1 -` - -// DeleteAlertConfig deletes an alert by ID -func (q *Queries) DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAlertConfig, alertConfigID) - return err -} diff --git a/api/internal/model/alert_subscription.go b/api/internal/model/alert_subscription.go deleted file mode 100644 index 66d2e564..00000000 --- a/api/internal/model/alert_subscription.go +++ /dev/null @@ -1,209 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// AlertSubscription is a profile subscription to an alert -type AlertSubscription struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - AlertSubscriptionSettings -} - -// AlertSubscriptionSettings holds all settings for an AlertSubscription -type AlertSubscriptionSettings struct { - MuteUI bool `json:"mute_ui" db:"mute_ui"` - MuteNotify bool `json:"mute_notify" db:"mute_notify"` -} - -// AlertSubscriptionCollection is a collection of AlertSubscription items -type AlertSubscriptionCollection struct { - Items []AlertSubscription `json:"items"` -} - -// EmailAlert is an email subscription to an alert -type EmailAlert struct { - ID uuid.UUID `json:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id"` - EmailID uuid.UUID `json:"profile_id"` - MuteNotify bool `json:"mute_notify" db:"mute_notify"` -} - -type Email struct { - ID uuid.UUID `json:"id" db:"id"` - Email string `json:"email" db:"email"` -} - -// UnmarshalJSON implements the UnmarshalJSON Interface for AlertSubscription -func (c *AlertSubscriptionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - 
case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var a AlertSubscription - if err := json.Unmarshal(b, &a); err != nil { - return err - } - c.Items = []AlertSubscription{a} - default: - c.Items = make([]AlertSubscription, 0) - } - return nil -} - -const subscribeProfileToAlerts = ` - INSERT INTO alert_profile_subscription (alert_config_id, profile_id) - VALUES ($1, $2) - ON CONFLICT DO NOTHING -` - -// SubscribeProfileToAlerts subscribes a profile to an instrument alert -func (q *Queries) SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeProfileToAlerts, alertConfigID, profileID) - return err -} - -const unsubscribeProfileToAlerts = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -// UnsubscribeProfileToAlerts subscribes a profile to an instrument alert -func (q *Queries) UnsubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeProfileToAlerts, alertConfigID, profileID) - return err -} - -const getAlertSubscription = ` - SELECT * FROM alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -// GetAlertSubscription returns a AlertSubscription -func (q *Queries) GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (AlertSubscription, error) { - var a AlertSubscription - err := q.db.GetContext(ctx, &a, getAlertSubscription, alertConfigID, profileID) - return a, err -} - -const getAlertSubscriptionByID = ` - SELECT * FROM alert_profile_subscription WHERE id = $1 -` - -// GetAlertSubscriptionByID returns an alert subscription -func (q *Queries) GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (AlertSubscription, error) { - var a AlertSubscription - err := q.db.GetContext(ctx, &a, getAlertSubscriptionByID, subscriptionID) - return a, err -} 
- -const listMyAlertSubscriptions = ` - SELECT * FROM alert_profile_subscription WHERE profile_id = $1 -` - -// ListMyAlertSubscriptions returns all profile_alerts for a given profile ID -func (q *Queries) ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]AlertSubscription, error) { - aa := make([]AlertSubscription, 0) - if err := q.db.SelectContext(ctx, &aa, listMyAlertSubscriptions, profileID); err != nil { - return nil, err - } - return aa, nil -} - -const updateMyAlertSubscription = ` - UPDATE alert_profile_subscription SET mute_ui=$1, mute_notify=$2 WHERE alert_config_id=$3 AND profile_id=$4 -` - -// UpdateMyAlertSubscription updates properties on a AlertSubscription -func (q *Queries) UpdateMyAlertSubscription(ctx context.Context, s AlertSubscription) error { - _, err := q.db.ExecContext(ctx, updateMyAlertSubscription, s.MuteUI, s.MuteNotify, s.AlertConfigID, s.ProfileID) - return err -} - -const registerEmail = ` - WITH e AS ( - INSERT INTO email (email) VALUES ($1) - ON CONFLICT ON CONSTRAINT unique_email DO NOTHING - RETURNING id - ) - SELECT id FROM e - UNION - SELECT id from email WHERE email = $1 -` - -func (q *Queries) RegisterEmail(ctx context.Context, emailAddress string) (uuid.UUID, error) { - var newID uuid.UUID - err := q.db.GetContext(ctx, &newID, registerEmail, emailAddress) - return newID, err -} - -const unregisterEmail = ` - DELETE FROM email WHERE id = $1 -` - -func (q *Queries) UnregisterEmail(ctx context.Context, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unregisterEmail, emailID) - return err -} - -const subscribeEmailToAlertConfig = ` - INSERT INTO alert_email_subscription (alert_config_id, email_id) VALUES ($1,$2) - ON CONFLICT ON CONSTRAINT email_unique_alert_config DO NOTHING -` - -func (q *Queries) SubscribeEmailToAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeEmailToAlertConfig, alertConfigID, emailID) - return err -} - 
-const subscribeProfileToAlertConfig = ` - INSERT INTO alert_profile_subscription (alert_config_id, profile_id) VALUES ($1,$2) - ON CONFLICT ON CONSTRAINT profile_unique_alert_config DO NOTHING -` - -func (q *Queries) SubscribeProfileToAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, subscribeProfileToAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeEmailFromAlertConfig = ` - DELETE FROM alert_email_subscription WHERE alert_config_id = $1 AND email_id = $2 -` - -func (q *Queries) UnsubscribeEmailFromAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeEmailFromAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeProfileFromAlertConfig = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 AND profile_id = $2 -` - -func (q *Queries) UnsubscribeProfileFromAlertConfig(ctx context.Context, alertConfigID, emailID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeProfileFromAlertConfig, alertConfigID, emailID) - return err -} - -const unsubscribeAllEmailsFromAlertConfig = ` - DELETE FROM alert_email_subscription WHERE alert_config_id = $1 -` - -func (q *Queries) UnsubscribeAllEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeAllEmailsFromAlertConfig, alertConfigID) - return err -} - -const unsubscribeAllProfilesFromAlertConfig = ` - DELETE FROM alert_profile_subscription WHERE alert_config_id = $1 -` - -func (q *Queries) UnsubscribeAllProfilesFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unsubscribeAllProfilesFromAlertConfig, alertConfigID) - return err -} diff --git a/api/internal/model/autocomplete.go b/api/internal/model/autocomplete.go deleted file mode 100644 index e84f236a..00000000 --- a/api/internal/model/autocomplete.go +++ /dev/null @@ 
-1,31 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// EmailAutocompleteResult stores search result in profiles and emails -type EmailAutocompleteResult struct { - ID uuid.UUID `json:"id"` - UserType string `json:"user_type" db:"user_type"` - Username *string `json:"username"` - Email string `json:"email"` -} - -const listEmailAutocomplete = ` - SELECT id, user_type, username, email - FROM v_email_autocomplete - WHERE username_email ILIKE '%'||$1||'%' - LIMIT $2 -` - -// ListEmailAutocomplete returns search results for email autocomplete -func (q *Queries) ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]EmailAutocompleteResult, error) { - aa := make([]EmailAutocompleteResult, 0) - if err := q.db.SelectContext(ctx, &aa, listEmailAutocomplete, emailInput, limit); err != nil { - return nil, err - } - return aa, nil -} diff --git a/api/internal/model/aware.go b/api/internal/model/aware.go deleted file mode 100644 index bda30d05..00000000 --- a/api/internal/model/aware.go +++ /dev/null @@ -1,67 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// AwareParameter struct -type AwareParameter struct { - ID uuid.UUID `json:"id"` - Key string `json:"key"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` -} - -// AwarePlatformParameterConfig holds information about which parameters are "enabled" for given instrument(s) -// { projectID: , instrument_id: , aware_id: , aware_parameters: { : } } -// aware_parameters is a map of : -type AwarePlatformParameterConfig struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - AwareID uuid.UUID `json:"aware_id" db:"aware_id"` - AwareParameters map[string]*uuid.UUID `json:"aware_parameters"` -} - -type AwarePlatformParameterEnabled struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - AwareID uuid.UUID `json:"aware_id" db:"aware_id"` - 
AwareParameterKey string `json:"aware_parameter_key" db:"aware_parameter_key"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` -} - -const listAwareParameters = ` - SELECT id, key, parameter_id, unit_id FROM aware_parameter -` - -// ListAwareParameters returns aware parameters -func (q *Queries) ListAwareParameters(ctx context.Context) ([]AwareParameter, error) { - pp := make([]AwareParameter, 0) - if err := q.db.SelectContext(ctx, &pp, listAwareParameters); err != nil { - return nil, err - } - return pp, nil -} - -const listAwarePlatformParameterEnabled = ` - SELECT instrument_id, aware_id, aware_parameter_key, timeseries_id - FROM v_aware_platform_parameter_enabled - ORDER BY aware_id, aware_parameter_key -` - -func (q *Queries) ListAwarePlatformParameterEnabled(ctx context.Context) ([]AwarePlatformParameterEnabled, error) { - aa := make([]AwarePlatformParameterEnabled, 0) - if err := q.db.SelectContext(ctx, &aa, listAwarePlatformParameterEnabled); err != nil { - return nil, err - } - return aa, nil -} - -const createAwarePlatform = ` - INSERT INTO aware_platform (instrument_id, aware_id) VALUES ($1, $2) -` - -func (q *Queries) CreateAwarePlatform(ctx context.Context, instrumentID, awareID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createAwarePlatform, &instrumentID, &awareID) - return err -} diff --git a/api/internal/model/collection_group.go b/api/internal/model/collection_group.go deleted file mode 100644 index 89ede791..00000000 --- a/api/internal/model/collection_group.go +++ /dev/null @@ -1,149 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -// CollectionGroup holds information for entity collection_group -type CollectionGroup struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Slug string `json:"slug" db:"slug"` - Name string `json:"name" db:"name"` - AuditInfo -} - -// CollectionGroupDetails holds same information as a 
CollectionGroup -// In Addition, contains array of structs; Each struct contains -// all fields for Timeseries AND additional latest_value, latest_time -type CollectionGroupDetails struct { - CollectionGroup - Timeseries []collectionGroupDetailsTimeseries `json:"timeseries"` -} - -// collectionGroupDetailsTimeseriesItem is a Timeseries with a little bit of extra information -type collectionGroupDetailsTimeseries struct { - Timeseries - LatestTime *time.Time `json:"latest_time" db:"latest_time"` - LatestValue *float32 `json:"latest_value" db:"latest_value"` -} - -const listCollectionGroups = ` - SELECT id, project_id, slug, name, creator, create_date, updater, update_date - FROM collection_group - WHERE project_id = $1 -` - -// ListCollectionGroups lists all collection groups for a project -func (q *Queries) ListCollectionGroups(ctx context.Context, projectID uuid.UUID) ([]CollectionGroup, error) { - aa := make([]CollectionGroup, 0) - if err := q.db.SelectContext(ctx, &aa, listCollectionGroups, projectID); err != nil { - return nil, err - } - return aa, nil -} - -const getCollectionGroupDetails = listCollectionGroups + ` - AND id = $2 -` - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (q *Queries) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (CollectionGroupDetails, error) { - var a CollectionGroupDetails - if err := q.db.GetContext(ctx, &a, getCollectionGroupDetails, projectID, collectionGroupID); err != nil { - return a, err - } - return a, nil -} - -const getCollectionGroupDetailsTimeseries = ` - SELECT t.*, tm.time as latest_time, tm.value as latest_value - FROM collection_group_timeseries cgt - INNER JOIN collection_group cg on cg.id = cgt.collection_group_id - INNER JOIN v_timeseries t on t.id = cgt.timeseries_id - LEFT JOIN timeseries_measurement tm on tm.timeseries_id = t.id and tm.time = ( - SELECT time FROM timeseries_measurement - WHERE timeseries_id = t.id - ORDER BY time DESC 
LIMIT 1 - ) - WHERE t.instrument_id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND cgt.collection_group_id = $2 -` - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (q *Queries) GetCollectionGroupDetailsTimeseries(ctx context.Context, projectID, collectionGroupID uuid.UUID) ([]collectionGroupDetailsTimeseries, error) { - aa := make([]collectionGroupDetailsTimeseries, 0) - if err := q.db.SelectContext(ctx, &aa, getCollectionGroupDetailsTimeseries, projectID, collectionGroupID); err != nil { - return nil, err - } - return aa, nil -} - -const createCollectionGroup = ` - INSERT INTO collection_group (project_id, name, slug, creator, create_date, updater, update_date) - VALUES ($1, $2::varchar, slugify($2::varchar, 'collection_group'), $3, $4, $5, $6) - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date -` - -// CreateCollectionGroup creates a new collection group -func (q *Queries) CreateCollectionGroup(ctx context.Context, cg CollectionGroup) (CollectionGroup, error) { - var cgNew CollectionGroup - if err := q.db.GetContext(ctx, &cgNew, createCollectionGroup, cg.ProjectID, cg.Name, cg.CreatorID, cg.CreateDate, cg.UpdaterID, cg.UpdateDate); err != nil { - return cgNew, err - } - return cgNew, nil -} - -const updateCollectionGroup = ` - UPDATE collection_group SET name=$3, updater=$4, update_date=$5 - WHERE project_id=$1 AND id=$2 - RETURNING id, project_id, name, slug, creator, create_date, updater, update_date -` - -// UpdateCollectionGroup updates an existing collection group's metadata -func (q *Queries) UpdateCollectionGroup(ctx context.Context, cg CollectionGroup) (CollectionGroup, error) { - var cgUpdated CollectionGroup - if err := q.db.GetContext(ctx, &cgUpdated, updateCollectionGroup, cg.ProjectID, cg.ID, cg.Name, cg.UpdaterID, cg.UpdateDate); err != nil { - return cgUpdated, err - } - return cgUpdated, nil -} - -const deleteCollectionGroup = ` - DELETE 
FROM collection_group WHERE project_id=$1 AND id=$2 -` - -// DeleteCollectionGroup deletes a collection group and associated timeseries relationships -// using the id of the collection group -func (q *Queries) DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteCollectionGroup, projectID, collectionGroupID) - return err -} - -const addTimeseriesToCollectionGroup = ` - INSERT INTO collection_group_timeseries (collection_group_id, timeseries_id) VALUES ($1, $2) - ON CONFLICT ON CONSTRAINT collection_group_unique_timeseries DO NOTHING -` - -// AddTimeseriesToCollectionGroup adds a timeseries to a collection group -func (q *Queries) AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, addTimeseriesToCollectionGroup, collectionGroupID, timeseriesID) - return err -} - -const removeTimeseriesFromCollectionGroup = ` - DELETE FROM collection_group_timeseries WHERE collection_group_id=$1 AND timeseries_id = $2 -` - -// RemoveTimeseriesFromCollectionGroup removes a timeseries from a collection group -func (q *Queries) RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, removeTimeseriesFromCollectionGroup, collectionGroupID, timeseriesID) - return err -} diff --git a/api/internal/model/common.go b/api/internal/model/common.go deleted file mode 100644 index 66432b87..00000000 --- a/api/internal/model/common.go +++ /dev/null @@ -1,41 +0,0 @@ -package model - -import ( - "time" - - "github.com/google/uuid" -) - -// AuditInfo holds common information about object creator and updater -type AuditInfo struct { - CreatorID uuid.UUID `json:"creator_id" db:"creator"` - CreatorUsername *string `json:"creator_username,omitempty" db:"creator_username"` - CreateDate time.Time `json:"create_date" db:"create_date"` - UpdaterID *uuid.UUID 
`json:"updater_id" db:"updater"` - UpdaterUsername *string `json:"updater_username,omitempty" db:"updater_username"` - UpdateDate *time.Time `json:"update_date" db:"update_date"` -} - -type IDSlug struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` -} - -type IDName struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` -} - -type IDSlugName struct { - IDSlug - Name string `json:"name,omitempty"` -} - -type IDSlugCollection struct { - Items []IDSlug `json:"items"` -} - -// Shortener allows a shorter representation of an object. Typically, ID and Slug fields -type Shortener[T any] interface { - Shorten() T -} diff --git a/api/internal/model/datalogger.go b/api/internal/model/datalogger.go deleted file mode 100644 index a1c6fa1a..00000000 --- a/api/internal/model/datalogger.go +++ /dev/null @@ -1,271 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "errors" - "time" - - "github.com/USACE/instrumentation-api/api/internal/password" - "github.com/google/uuid" - "github.com/jackc/pgtype" -) - -// Telemetry struct -type Telemetry struct { - ID uuid.UUID - TypeID string - TypeSlug string - TypeName string -} - -type Datalogger struct { - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - SN string `json:"sn" db:"sn"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Slug string `json:"slug" db:"slug"` - ModelID uuid.UUID `json:"model_id" db:"model_id"` - Model *string `json:"model" db:"model"` - Errors []string `json:"errors" db:"-"` - PgErrors pgtype.TextArray `json:"-" db:"errors"` - Tables dbJSONSlice[DataloggerTable] `json:"tables" db:"tables"` - AuditInfo -} - -type DataloggerWithKey struct { - Datalogger - Key string `json:"key"` -} - -type DataloggerTable struct { - ID uuid.UUID `json:"id" db:"id"` - TableName string `json:"table_name" db:"table_name"` -} - -type DataloggerTablePreview struct { - DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` - UpdateDate 
time.Time `json:"update_date" db:"update_date"` - Preview pgtype.JSON `json:"preview" db:"preview"` -} - -type DataloggerError struct { - DataloggerTableID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` - Errors []string `json:"errors" db:"errors"` -} - -const getDataloggerModelName = ` - SELECT model FROM datalogger_model WHERE id = $1 -` - -func (q *Queries) GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) { - var modelName string - if err := q.db.GetContext(ctx, &modelName, getDataloggerModelName, modelID); err != nil { - return "", err - } - return modelName, nil -} - -const listProjectDataloggers = ` - SELECT * FROM v_datalogger WHERE project_id = $1 -` - -func (q *Queries) ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]Datalogger, error) { - dls := make([]Datalogger, 0) - if err := q.db.SelectContext(ctx, &dls, listProjectDataloggers, projectID); err != nil { - return make([]Datalogger, 0), err - } - for i := 0; i < len(dls); i++ { - if err := dls[i].PgErrors.AssignTo(&dls[i].Errors); err != nil { - return make([]Datalogger, 0), err - } - } - return dls, nil -} - -const listAllDataloggers = ` - SELECT * FROM v_datalogger -` - -func (q *Queries) ListAllDataloggers(ctx context.Context) ([]Datalogger, error) { - dls := make([]Datalogger, 0) - if err := q.db.SelectContext(ctx, &dls, listAllDataloggers); err != nil { - return make([]Datalogger, 0), err - } - for i := 0; i < len(dls); i++ { - if err := dls[i].PgErrors.AssignTo(&dls[i].Errors); err != nil { - return make([]Datalogger, 0), err - } - } - return dls, nil -} - -const getDataloggerIsActive = ` - SELECT EXISTS (SELECT * FROM v_datalogger WHERE model = $1 AND sn = $2)::int -` - -// GetDataloggerIsActive checks if datalogger with sn already exists and is not deleted -func (q *Queries) GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) { - var isActive bool - if err := q.db.GetContext(ctx, &isActive, getDataloggerIsActive, 
modelName, sn); err != nil { - return false, err - } - return isActive, nil -} - -const verifyDataloggerExists = ` - SELECT id FROM v_datalogger WHERE id = $1 -` - -// VerifyDataloggerExists checks if datalogger with sn already exists and is not deleted -func (q *Queries) VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error { - return q.db.GetContext(ctx, &uuid.UUID{}, verifyDataloggerExists, dlID) -} - -const createDataloggerHash = ` - INSERT INTO datalogger_hash (datalogger_id, "hash") VALUES ($1, $2) -` - -func (q *Queries) CreateDataloggerHash(ctx context.Context, dataloggerID uuid.UUID) (string, error) { - key := password.GenerateRandom(40) - if _, err := q.db.ExecContext(ctx, createDataloggerHash, dataloggerID, password.MustCreateHash(key, password.DefaultParams)); err != nil { - return "", err - } - return key, nil -} - -const getOneDatalogger = ` - SELECT * FROM v_datalogger WHERE id = $1 -` - -func (q *Queries) GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (Datalogger, error) { - var dl Datalogger - if err := q.db.GetContext(ctx, &dl, getOneDatalogger, dataloggerID); err != nil { - return dl, err - } - if err := dl.PgErrors.AssignTo(&dl.Errors); err != nil { - return dl, err - } - return dl, nil -} - -const createDatalogger = ` - INSERT INTO datalogger (name, sn, project_id, creator, updater, slug, model_id) - VALUES ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) - RETURNING id -` - -func (q *Queries) CreateDatalogger(ctx context.Context, dl Datalogger) (uuid.UUID, error) { - var dlID uuid.UUID - err := q.db.GetContext(ctx, &dlID, createDatalogger, dl.Name, dl.SN, dl.ProjectID, dl.CreatorID, dl.ModelID) - return dlID, err -} - -const updateDatalogger = ` - UPDATE datalogger SET - name = $2, - updater = $3, - update_date = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateDatalogger(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, updateDatalogger, dl.ID, dl.Name, dl.UpdaterID, dl.UpdateDate) - 
return err -} - -const updateDataloggerHash = ` - UPDATE datalogger_hash SET "hash" = $2 WHERE datalogger_id = $1 -` - -func (q *Queries) UpdateDataloggerHash(ctx context.Context, dataloggerID uuid.UUID) (string, error) { - key := password.GenerateRandom(40) - if _, err := q.db.ExecContext(ctx, updateDataloggerHash, dataloggerID, password.MustCreateHash(key, password.DefaultParams)); err != nil { - return "", err - } - return key, nil -} - -const updateDataloggerUpdater = ` - UPDATE datalogger SET updater = $2, update_date = $3 WHERE id = $1 -` - -func (q *Queries) UpdateDataloggerUpdater(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, updateDataloggerUpdater, dl.ID, dl.UpdaterID, dl.UpdateDate) - return err -} - -const deleteDatalogger = ` - UPDATE datalogger SET deleted = true, updater = $2, update_date = $3 WHERE id = $1 -` - -func (q *Queries) DeleteDatalogger(ctx context.Context, dl Datalogger) error { - _, err := q.db.ExecContext(ctx, deleteDatalogger, dl.ID, dl.UpdaterID, dl.UpdateDate) - return err -} - -const getDataloggerTablePreview = ` - SELECT * FROM v_datalogger_preview WHERE datalogger_table_id = $1 -` - -func (q *Queries) GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (DataloggerTablePreview, error) { - var dlp DataloggerTablePreview - err := q.db.GetContext(ctx, &dlp, getDataloggerTablePreview, dataloggerTableID) - if errors.Is(err, sql.ErrNoRows) { - dlp.DataloggerTableID = dataloggerTableID - if err := dlp.Preview.Set("null"); err != nil { - return DataloggerTablePreview{}, err - } - return dlp, nil - } - return dlp, err -} - -const resetDataloggerTableName = ` - UPDATE datalogger_table SET table_name = '' WHERE id = $1 -` - -func (q *Queries) ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, resetDataloggerTableName, dataloggerTableID) - return err -} - -const renameEmptyDataloggerTableName = ` - UPDATE datalogger_table - 
SET table_name = $2 - WHERE table_name = '' AND datalogger_id = $1 - AND NOT EXISTS ( - SELECT 1 FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2 - ); -` - -func (q *Queries) RenameEmptyDataloggerTableName(ctx context.Context, dataloggerID uuid.UUID, tableName string) error { - _, err := q.db.ExecContext(ctx, renameEmptyDataloggerTableName, dataloggerID, tableName) - return err -} - -const getOrCreateDataloggerTable = ` - WITH dt AS ( - INSERT INTO datalogger_table (datalogger_id, table_name) VALUES ($1, $2) - ON CONFLICT ON CONSTRAINT datalogger_table_datalogger_id_table_name_key DO NOTHING - RETURNING id - ) - SELECT id FROM dt - UNION - SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2 -` - -func (q *Queries) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - var tID uuid.UUID - err := q.db.GetContext(ctx, &tID, getOrCreateDataloggerTable, dataloggerID, tableName) - return tID, err -} - -const deleteDataloggerTable = ` - DELETE FROM datalogger_table WHERE id = $1 -` - -func (q *Queries) DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteDataloggerTable, dataloggerTableID) - return err -} diff --git a/api/internal/model/datalogger_telemetry.go b/api/internal/model/datalogger_telemetry.go deleted file mode 100644 index b8b36e3b..00000000 --- a/api/internal/model/datalogger_telemetry.go +++ /dev/null @@ -1,82 +0,0 @@ -package model - -import ( - "context" - "database/sql" - - "github.com/google/uuid" -) - -const getDataloggerByModelSN = ` - SELECT * FROM v_datalogger - WHERE model = $1 AND sn = $2 -` - -func (q *Queries) GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (Datalogger, error) { - var dl Datalogger - err := q.db.GetContext(ctx, &dl, getDataloggerByModelSN, modelName, sn) - return dl, err -} - -const getDataloggerHashByModelSN = ` - SELECT "hash" FROM 
v_datalogger_hash - WHERE model = $1 AND sn = $2 -` - -func (q *Queries) GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) { - var hash string - if err := q.db.GetContext(ctx, &hash, getDataloggerHashByModelSN, modelName, sn); err != nil { - return "", err - } - return hash, nil -} - -const createDataloggerTablePreview = ` - INSERT INTO datalogger_preview (datalogger_table_id, preview, update_date) VALUES ($1, $2, $3) -` - -func (q *Queries) CreateDataloggerTablePreview(ctx context.Context, prv DataloggerTablePreview) error { - _, err := q.db.ExecContext(ctx, createDataloggerTablePreview, prv.DataloggerTableID, prv.Preview, prv.UpdateDate) - return err -} - -const updateDataloggerTablePreview = ` - UPDATE datalogger_preview SET preview = $3, update_date = $4 - WHERE datalogger_table_id IN (SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2) -` - -func (q *Queries) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv DataloggerTablePreview) error { - result, err := q.db.ExecContext(ctx, updateDataloggerTablePreview, dataloggerID, tableName, prv.Preview, prv.UpdateDate) - r, err := result.RowsAffected() - if err != nil { - return err - } - if r == 0 { - return sql.ErrNoRows - } - return err -} - -const deleteDataloggerTableError = ` - DELETE FROM datalogger_error - WHERE datalogger_table_id IN (SELECT id FROM datalogger_table WHERE datalogger_id = $1 AND table_name = $2) -` - -func (q *Queries) DeleteDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string) error { - _, err := q.db.ExecContext(ctx, deleteDataloggerTableError, dataloggerID, tableName) - return err -} - -const createDataloggerError = ` - INSERT INTO datalogger_error (datalogger_table_id, error_message) - SELECT id, $3 FROM datalogger_table - WHERE datalogger_id = $1 AND table_name = $2 - AND NOT EXISTS ( - SELECT 1 FROM datalogger_table WHERE datalogger_id = $1 AND 
table_name = $2 - ); -` - -func (q *Queries) CreateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, errMessage string) error { - _, err := q.db.ExecContext(ctx, createDataloggerError, dataloggerID, tableName, errMessage) - return err -} diff --git a/api/internal/model/db.go b/api/internal/model/db.go deleted file mode 100644 index 9174e4bc..00000000 --- a/api/internal/model/db.go +++ /dev/null @@ -1,143 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "encoding/json" - "errors" - "fmt" - "log" - "time" - - "github.com/USACE/instrumentation-api/api/internal/config" - _ "github.com/jackc/pgx/v5/stdlib" - "github.com/jmoiron/sqlx" - "github.com/lib/pq" -) - -// DBTX includes all methods shared by sqlx.DB and sqlx.Tx, allowing -// either type to be used interchangeably. -// https://github.com/jmoiron/sqlx/pull/809 -type DBTX interface { - sqlx.Ext - sqlx.ExecerContext - sqlx.PreparerContext - sqlx.QueryerContext - sqlx.Preparer - - GetContext(context.Context, interface{}, string, ...interface{}) error - SelectContext(context.Context, interface{}, string, ...interface{}) error - Get(interface{}, string, ...interface{}) error - MustExecContext(context.Context, string, ...interface{}) sql.Result - PreparexContext(context.Context, string) (*sqlx.Stmt, error) - QueryRowContext(context.Context, string, ...interface{}) *sql.Row - Select(interface{}, string, ...interface{}) error - QueryRow(string, ...interface{}) *sql.Row - PrepareNamedContext(context.Context, string) (*sqlx.NamedStmt, error) - PrepareNamed(string) (*sqlx.NamedStmt, error) - Preparex(string) (*sqlx.Stmt, error) - NamedExec(string, interface{}) (sql.Result, error) - NamedExecContext(context.Context, string, interface{}) (sql.Result, error) - MustExec(string, ...interface{}) sql.Result - NamedQuery(string, interface{}) (*sqlx.Rows, error) -} - -type DBRows interface { - Close() error - Columns() ([]string, error) - ColumnTypes() ([]*sql.ColumnType, error) - 
Err() error - Next() bool - NextResultSet() bool - Scan(dest ...interface{}) error - SliceScan() ([]interface{}, error) - MapScan(dest map[string]interface{}) error - StructScan(dest interface{}) error -} - -type Tx interface { - Commit() error - Rollback() error -} - -var _ DBTX = (*sqlx.DB)(nil) -var _ DBTX = (*sqlx.Tx)(nil) -var _ DBRows = (*sqlx.Rows)(nil) -var _ Tx = (*sqlx.Tx)(nil) - -var sqlIn = sqlx.In - -type Database struct { - *sqlx.DB -} - -func (db *Database) Queries() *Queries { - return &Queries{db} -} - -type Queries struct { - db DBTX -} - -func (q *Queries) WithTx(tx *sqlx.Tx) *Queries { - return &Queries{ - db: tx, - } -} - -func TxDo(rollback func() error) { - err := rollback() - if err != nil && !errors.Is(err, sql.ErrTxDone) { - log.Print(err.Error()) - } -} - -func NewDatabase(cfg *config.DBConfig) *Database { - db, err := sqlx.Connect("pgx", cfg.ConnStr()) - if err != nil { - log.Fatalf("Could not connect to database: %s", err.Error()) - } - if db == nil { - log.Panicf("database is nil") - } - - db.SetMaxOpenConns(50) - db.SetMaxIdleConns(5) - db.SetConnMaxLifetime(time.Minute * 30) - - return &Database{db} -} - -// Some generic types to help sqlx scan arrays / json -type dbSlice[T any] []T - -func (d *dbSlice[T]) Scan(src interface{}) error { - value := make([]T, 0) - if err := pq.Array(&value).Scan(src); err != nil { - return err - } - *d = dbSlice[T](value) - return nil -} - -type dbJSONSlice[T any] []T - -func (d *dbJSONSlice[T]) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("failed type assertion") - } - return json.Unmarshal([]byte(b), d) -} - -func MapToStruct[T any](v map[string]interface{}) (T, error) { - var o T - s, err := json.Marshal(v) - if err != nil { - return o, err - } - if err := json.Unmarshal(s, &o); err != nil { - return o, err - } - return o, nil -} diff --git a/api/internal/model/district_rollup.go b/api/internal/model/district_rollup.go deleted file mode 100644 index 
60ac63ed..00000000 --- a/api/internal/model/district_rollup.go +++ /dev/null @@ -1,56 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type DistrictRollup struct { - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` - DistrictInitials *string `json:"district_initials" db:"district_initials"` - ProjectName string `json:"project_name" db:"project_name"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - Month time.Time `json:"month" db:"the_month"` - ExpectedTotalSubmittals int `json:"expected_total_submittals" db:"expected_total_submittals"` - ActualTotalSubmittals int `json:"actual_total_submittals" db:"actual_total_submittals"` - RedSubmittals int `json:"red_submittals" db:"red_submittals"` - YellowSubmittals int `json:"yellow_submittals" db:"yellow_submittals"` - GreenSubmittals int `json:"green_submittals" db:"green_submittals"` -} - -const listEvaluationDistrictRollup = ` - SELECT * FROM v_district_rollup - WHERE alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID - AND project_id = $1 - AND the_month >= DATE_TRUNC('month', $2::TIMESTAMPTZ) - AND the_month <= DATE_TRUNC('month', $3::TIMESTAMPTZ) -` - -// ListCollectionGroups lists all collection groups for a project -func (q *Queries) ListEvaluationDistrictRollup(ctx context.Context, opID uuid.UUID, tw TimeWindow) ([]DistrictRollup, error) { - dr := make([]DistrictRollup, 0) - if err := q.db.SelectContext(ctx, &dr, listEvaluationDistrictRollup, opID, tw.After, tw.Before); err != nil { - return nil, err - } - return dr, nil -} - -const listMeasurementDistrictRollup = ` - SELECT * FROM v_district_rollup - WHERE alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::UUID - AND project_id = $1 - AND the_month >= DATE_TRUNC('month', $2::TIMESTAMPTZ) - AND the_month <= DATE_TRUNC('month', $3::TIMESTAMPTZ) -` - -// ListCollectionGroups lists all collection groups for a project -func (q 
*Queries) ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw TimeWindow) ([]DistrictRollup, error) { - dr := make([]DistrictRollup, 0) - if err := q.db.SelectContext(ctx, &dr, listMeasurementDistrictRollup, opID, tw.After, tw.Before); err != nil { - return nil, err - } - return dr, nil -} diff --git a/api/internal/model/domains.go b/api/internal/model/domains.go deleted file mode 100644 index 2caeb54e..00000000 --- a/api/internal/model/domains.go +++ /dev/null @@ -1,60 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// Domain is a struct for returning all database domain values -type Domain struct { - ID uuid.UUID `json:"id" db:"id"` - Group string `json:"group" db:"group"` - Value string `json:"value" db:"value"` - Description *string `json:"description" db:"description"` -} - -type DomainGroup struct { - Group string `json:"group" db:"group"` - Opts dbJSONSlice[DomainGroupOption] `json:"opts" db:"opts"` -} - -type DomainGroupOption struct { - ID uuid.UUID `json:"id" db:"id"` - Value string `json:"value" db:"value"` - Description *string `json:"description" db:"description"` -} - -type DomainGroupCollection []DomainGroup - -type DomainMap map[string][]DomainGroupOption - -const getDomains = ` - SELECT * FROM v_domain -` - -// GetDomains returns a UNION of all domain tables in the database -func (q *Queries) GetDomains(ctx context.Context) ([]Domain, error) { - dd := make([]Domain, 0) - if err := q.db.SelectContext(ctx, &dd, getDomains); err != nil { - return nil, err - } - return dd, nil -} - -const getDomainMap = ` - SELECT * FROM v_domain_group -` - -// GetDomainsV2 returns all domains grouped by table -func (q *Queries) GetDomainMap(ctx context.Context) (DomainMap, error) { - dd := make([]DomainGroup, 0) - if err := q.db.SelectContext(ctx, &dd, getDomainMap); err != nil { - return nil, err - } - m := make(DomainMap) - for i := range dd { - m[dd[i].Group] = dd[i].Opts - } - return m, nil -} diff --git 
a/api/internal/model/equivalency_table.go b/api/internal/model/equivalency_table.go deleted file mode 100644 index 803a6e4f..00000000 --- a/api/internal/model/equivalency_table.go +++ /dev/null @@ -1,161 +0,0 @@ -package model - -import ( - "context" - "errors" - "fmt" - - "github.com/google/uuid" - "github.com/jackc/pgconn" - "github.com/jackc/pgerrcode" -) - -type EquivalencyTable struct { - DataloggerID uuid.UUID `json:"datalogger_id" db:"datalogger_id"` - DataloggerTableID uuid.UUID `json:"datalogger_table_id" db:"datalogger_table_id"` - DataloggerTableName string `json:"datalogger_table_name" db:"datalogger_table_name"` - Rows dbJSONSlice[EquivalencyTableRow] `json:"rows" db:"fields"` -} - -type EquivalencyTableRow struct { - ID uuid.UUID `json:"id" db:"id"` - FieldName string `json:"field_name" db:"field_name"` - DisplayName string `json:"display_name" db:"display_name"` - InstrumentID *uuid.UUID `json:"instrument_id" db:"instrument_id"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` -} - -const getIsValidDataloggerTable = ` - SELECT NOT EXISTS ( - SELECT * FROM datalogger_table WHERE id = $1 AND table_name = 'preparse' - ) -` - -// GetIsValidDataloggerTable verifies that a datalogger table is not "preparse" (read-only) -func (q *Queries) GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - var isValid bool - if err := q.db.GetContext(ctx, &isValid, getIsValidDataloggerTable, dataloggerTableID); err != nil { - return err - } - if !isValid { - return fmt.Errorf("table preparse is read only %s", dataloggerTableID) - } - return nil -} - -const getIsValidEquivalencyTableTimeseries = ` - SELECT NOT EXISTS ( - SELECT id FROM v_timeseries_computed - WHERE id = $1 - UNION ALL - SELECT timeseries_id FROM instrument_constants - WHERE timeseries_id = $1 - ) -` - -// GetIsValidEquivalencyTableTimeseries verifies that a Timeseries is not computed or constant -func (q *Queries) GetIsValidEquivalencyTableTimeseries(ctx 
context.Context, tsID uuid.UUID) error { - var isValid bool - if err := q.db.GetContext(ctx, &isValid, getIsValidEquivalencyTableTimeseries, tsID); err != nil { - return err - } - if !isValid { - return fmt.Errorf("timeseries '%s' must not be computed or constant", tsID) - } - return nil -} - -const getEquivalencyTable = ` - SELECT - datalogger_id, - datalogger_table_id, - datalogger_table_name, - fields - FROM v_datalogger_equivalency_table - WHERE datalogger_table_id = $1 -` - -// GetEquivalencyTable returns a single Datalogger EquivalencyTable -func (q *Queries) GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (EquivalencyTable, error) { - var et EquivalencyTable - err := q.db.GetContext(ctx, &et, getEquivalencyTable, dataloggerTableID) - return et, err -} - -const createOrUpdateEquivalencyTableRow = ` - INSERT INTO datalogger_equivalency_table - (datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) - VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT ON CONSTRAINT datalogger_equivalency_table_datalogger_table_id_field_name_key - DO UPDATE SET display_name = EXCLUDED.display_name, instrument_id = EXCLUDED.instrument_id, timeseries_id = EXCLUDED.timeseries_id -` - -func (q *Queries) CreateOrUpdateEquivalencyTableRow(ctx context.Context, dataloggerID, dataloggerTableID uuid.UUID, tr EquivalencyTableRow) error { - if _, err := q.db.ExecContext(ctx, createOrUpdateEquivalencyTableRow, - dataloggerID, - dataloggerTableID, - tr.FieldName, - tr.DisplayName, - tr.InstrumentID, - tr.TimeseriesID, - ); err != nil { - return err - } - return nil -} - -const updateEquivalencyTableRow = ` - UPDATE datalogger_equivalency_table SET - field_name = $2, - display_name = $3, - instrument_id = $4, - timeseries_id = $5 - WHERE id = $1 -` - -func (q *Queries) UpdateEquivalencyTableRow(ctx context.Context, tr EquivalencyTableRow) error { - if _, err := q.db.ExecContext(ctx, updateEquivalencyTableRow, - tr.ID, - tr.FieldName, - 
tr.DisplayName, - tr.InstrumentID, - tr.TimeseriesID, - ); err != nil { - var pgErr *pgconn.PgError - if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation { - return fmt.Errorf("timeseries_id %s is already mapped to an active datalogger", tr.TimeseriesID) - } - return err - } - return nil -} - -const deleteEquivalencyTable = ` - DELETE FROM datalogger_equivalency_table WHERE datalogger_table_id = $1 -` - -// DeleteEquivalencyTable clears all rows of the EquivalencyTable for a datalogger table -func (q *Queries) DeleteEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteEquivalencyTable, dataloggerTableID) - return err -} - -const deleteEquivalencyTableRow = ` - DELETE FROM datalogger_equivalency_table WHERE id = $1 -` - -// DeleteEquivalencyTableRow deletes a single EquivalencyTable row by row id -func (q *Queries) DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error { - res, err := q.db.ExecContext(ctx, deleteEquivalencyTableRow, rowID) - if err != nil { - return err - } - count, err := res.RowsAffected() - if err != nil { - return err - } - if count == 0 { - return fmt.Errorf("row not found %s", rowID) - } - return nil -} diff --git a/api/internal/model/evaluation.go b/api/internal/model/evaluation.go deleted file mode 100644 index 62a1b0d8..00000000 --- a/api/internal/model/evaluation.go +++ /dev/null @@ -1,224 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "fmt" - "time" - - "github.com/google/uuid" -) - -type Evaluation struct { - ID uuid.UUID `json:"id" db:"id"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - AlertConfigID *uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName *string `json:"alert_config_name" db:"alert_config_name"` - SubmittalID *uuid.UUID `json:"submittal_id" db:"submittal_id"` - Name string `json:"name" db:"name"` - Body string 
`json:"body" db:"body"` - StartDate time.Time `json:"start_date" db:"start_date"` - EndDate time.Time `json:"end_date" db:"end_date"` - Instruments dbJSONSlice[EvaluationInstrument] `json:"instruments" db:"instruments"` - AuditInfo -} - -type EvaluationInstrument struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` -} - -const listProjectEvaluations = ` - SELECT * - FROM v_evaluation - WHERE project_id = $1 -` - -func (q *Queries) ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - if err := q.db.SelectContext(ctx, &ee, listProjectEvaluations, projectID); err != nil { - return nil, err - } - return ee, nil -} - -const listProjectEvaluationsByAlertConfig = ` - SELECT * FROM v_evaluation - WHERE project_id = $1 - AND alert_config_id IS NOT NULL - AND alert_config_id = $2 -` - -func (q *Queries) ListProjectEvaluationsByAlertConfig(ctx context.Context, projectID, alertConfigID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - err := q.db.SelectContext(ctx, &ee, listProjectEvaluationsByAlertConfig, projectID, alertConfigID) - if err != nil { - return make([]Evaluation, 0), err - } - return ee, nil -} - -const listInstrumentEvaluations = ` - SELECT * FROM v_evaluation - WHERE id = ANY( - SELECT evaluation_id - FROM evaluation_instrument - WHERE instrument_id = $1 - ) -` - -func (q *Queries) ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]Evaluation, error) { - ee := make([]Evaluation, 0) - if err := q.db.SelectContext(ctx, &ee, listInstrumentEvaluations, instrumentID); err != nil { - return nil, err - } - return ee, nil -} - -const getEvaluation = ` - SELECT * FROM v_evaluation WHERE id = $1 -` - -func (q *Queries) GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (Evaluation, error) { - var e Evaluation - if err := q.db.GetContext(ctx, &e, getEvaluation, 
evaluationID); err != nil { - return e, err - } - return e, nil -} - -const completeEvaluationSubmittal = ` - UPDATE submittal sub1 SET - submittal_status_id = sq.submittal_status_id, - completion_date = NOW() - FROM ( - SELECT - sub2.id AS submittal_id, - CASE - -- if completed before due date, mark submittal as green id - WHEN NOW() <= sub2.due_date THEN '0c0d6487-3f71-4121-8575-19514c7b9f03'::UUID - -- if completed after due date, mark as yellow - ELSE 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::UUID - END AS submittal_status_id - FROM submittal sub2 - INNER JOIN alert_config ac ON sub2.alert_config_id = ac.id - WHERE sub2.id = $1 - AND sub2.completion_date IS NULL - AND NOT sub2.marked_as_missing - AND ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID - ) sq - WHERE sub1.id = sq.submittal_id - RETURNING sub1.* -` - -func (q *Queries) CompleteEvaluationSubmittal(ctx context.Context, submittalID uuid.UUID) (Submittal, error) { - var sub Submittal - if err := q.db.GetContext(ctx, &sub, completeEvaluationSubmittal, submittalID); err != nil { - if err == sql.ErrNoRows { - return sub, fmt.Errorf("submittal must exist, be of evaluation type, and before due date or unvalidated missing") - } - return sub, err - } - return sub, nil -} - -const createNextEvaluationSubmittal = ` - INSERT INTO submittal (alert_config_id, due_date) - SELECT - ac.id, - NOW() + ac.schedule_interval - FROM alert_config ac - WHERE ac.id IN (SELECT alert_config_id FROM submittal WHERE id = $1) -` - -func (q *Queries) CreateNextEvaluationSubmittal(ctx context.Context, submittalID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createNextEvaluationSubmittal, submittalID) - return err -} - -const createEvaluation = ` - INSERT INTO evaluation ( - project_id, - submittal_id, - name, - body, - start_date, - end_date, - creator, - create_date - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8) - RETURNING id -` - -func (q *Queries) CreateEvaluation(ctx context.Context, ev Evaluation) (uuid.UUID, error) 
{ - var evaluationID uuid.UUID - err := q.db.GetContext( - ctx, - &evaluationID, - createEvaluation, - ev.ProjectID, - ev.SubmittalID, - ev.Name, - ev.Body, - ev.StartDate, - ev.EndDate, - ev.CreatorID, - ev.CreateDate, - ) - return evaluationID, err -} - -const createEvalationInstrument = ` - INSERT INTO evaluation_instrument (evaluation_id, instrument_id) VALUES ($1,$2) -` - -func (q *Queries) CreateEvaluationInstrument(ctx context.Context, evaluationID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createEvalationInstrument, evaluationID, instrumentID) - return err -} - -const updateEvaluation = ` - UPDATE evaluation SET - name=$3, - body=$4, - start_date=$5, - end_date=$6, - updater=$7, - update_date=$8 - WHERE id=$1 AND project_id=$2 -` - -func (q *Queries) UpdateEvaluation(ctx context.Context, ev Evaluation) error { - _, err := q.db.ExecContext( - ctx, - updateEvaluation, - ev.ID, - ev.ProjectID, - ev.Name, - ev.Body, - ev.StartDate, - ev.EndDate, - ev.UpdaterID, - ev.UpdateDate, - ) - return err -} - -const unassignAllInstrumentsFromEvaluation = ` - DELETE FROM evaluation_instrument WHERE evaluation_id = $1 -` - -func (q *Queries) UnassignAllInstrumentsFromEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignAllInstrumentsFromEvaluation, evaluationID) - return err -} - -const deleteEvaluation = ` - DELETE FROM evaluation WHERE id = $1 -` - -func (q *Queries) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteEvaluation, evaluationID) - return err -} diff --git a/api/internal/model/heartbeat.go b/api/internal/model/heartbeat.go deleted file mode 100644 index 1fd536a6..00000000 --- a/api/internal/model/heartbeat.go +++ /dev/null @@ -1,50 +0,0 @@ -package model - -import ( - "context" - "time" -) - -// Heartbeat is a timestamp -type Heartbeat struct { - Time time.Time `json:"time"` -} - -const doHeartbeat = ` - INSERT INTO 
heartbeat (time) VALUES ($1) RETURNING * -` - -// DoHeartbeat does regular-interval tasks -func (q *Queries) DoHeartbeat(ctx context.Context) (Heartbeat, error) { - var h Heartbeat - if err := q.db.GetContext(ctx, &h, doHeartbeat, time.Now().In(time.UTC)); err != nil { - return h, err - } - return h, nil -} - -const getLatestHeartbeat = ` - SELECT MAX(time) AS time FROM heartbeat -` - -// GetLatestHeartbeat returns the most recent system heartbeat -func (q *Queries) GetLatestHeartbeat(ctx context.Context) (Heartbeat, error) { - var h Heartbeat - if err := q.db.GetContext(ctx, &h, getLatestHeartbeat); err != nil { - return h, err - } - return h, nil -} - -const listHeartbeats = ` - SELECT * FROM heartbeat -` - -// ListHeartbeats returns all system heartbeats -func (q *Queries) ListHeartbeats(ctx context.Context) ([]Heartbeat, error) { - hh := make([]Heartbeat, 0) - if err := q.db.SelectContext(ctx, &hh, listHeartbeats); err != nil { - return nil, err - } - return hh, nil -} diff --git a/api/internal/model/home.go b/api/internal/model/home.go deleted file mode 100644 index f0854766..00000000 --- a/api/internal/model/home.go +++ /dev/null @@ -1,30 +0,0 @@ -package model - -import ( - "context" -) - -// Home is information for the homepage (landing page) -type Home struct { - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - InstrumetGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` - ProjectCount int `json:"project_count" db:"project_count"` - NewInstruments7D int `json:"new_instruments_7d" db:"new_instruments_7d"` - NewMeasurements2H int `json:"new_measurements_2h" db:"new_measurements_2h"` -} - -const getHome = ` - SELECT - (SELECT COUNT(*) FROM instrument WHERE NOT deleted) AS instrument_count, - (SELECT COUNT(*) FROM project WHERE NOT deleted) AS project_count, - (SELECT COUNT(*) FROM instrument_group) AS instrument_group_count, - (SELECT COUNT(*) FROM instrument WHERE NOT deleted AND create_date > NOW() - '7 
days'::INTERVAL) AS new_instruments_7d, - (SELECT COUNT(*) FROM timeseries_measurement WHERE time > NOW() - '2 hours'::INTERVAL) AS new_measurements_2h -` - -// GetHome returns information for the homepage -func (q *Queries) GetHome(ctx context.Context) (Home, error) { - var home Home - err := q.db.GetContext(ctx, &home, getHome) - return home, err -} diff --git a/api/internal/model/instrument.go b/api/internal/model/instrument.go deleted file mode 100644 index dfc9e75f..00000000 --- a/api/internal/model/instrument.go +++ /dev/null @@ -1,322 +0,0 @@ -package model - -import ( - "context" - "database/sql/driver" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" - - "github.com/paulmach/orb" - "github.com/paulmach/orb/encoding/wkb" - "github.com/paulmach/orb/geojson" -) - -// Instrument is an instrument -type Instrument struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - AwareID *uuid.UUID `json:"aware_id,omitempty"` - Groups dbSlice[uuid.UUID] `json:"groups" db:"groups"` - Constants dbSlice[uuid.UUID] `json:"constants" db:"constants"` - AlertConfigs dbSlice[uuid.UUID] `json:"alert_configs" db:"alert_configs"` - StatusID uuid.UUID `json:"status_id" db:"status_id"` - Status string `json:"status"` - StatusTime time.Time `json:"status_time" db:"status_time"` - Deleted bool `json:"-"` - TypeID uuid.UUID `json:"type_id" db:"type_id"` - Type string `json:"type"` - Icon *string `json:"icon" db:"icon"` - Geometry Geometry `json:"geometry,omitempty"` - Station *int `json:"station"` - StationOffset *int `json:"offset" db:"station_offset"` - Projects dbJSONSlice[IDSlugName] `json:"projects" db:"projects"` - NIDID *string `json:"nid_id" db:"nid_id"` - USGSID *string `json:"usgs_id" db:"usgs_id"` - HasCwms bool `json:"has_cwms" db:"has_cwms"` - ShowCwmsTab bool `json:"show_cwms_tab" db:"show_cwms_tab"` - Opts Opts `json:"opts" db:"opts"` - AuditInfo -} - -// Optional instrument metadata based on type -// If there are no 
options defined for the instrument type, the object will be empty -type Opts map[string]interface{} - -func (o *Opts) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), o) -} - -// InstrumentCollection is a collection of Instrument items -type InstrumentCollection []Instrument - -// Shorten returns an instrument collection with individual objects limited to ID and Struct fields -func (ic InstrumentCollection) Shorten() IDSlugCollection { - ss := IDSlugCollection{Items: make([]IDSlug, 0)} - for _, n := range ic { - s := IDSlug{ID: n.ID, Slug: n.Slug} - - ss.Items = append(ss.Items, s) - } - return ss -} - -type InstrumentCount struct { - InstrumentCount int `json:"instrument_count"` -} - -type InstrumentsProjectCount struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentName string `json:"instrument_name" db:"instrument_name"` - ProjectCount int `json:"project_count" db:"project_count"` -} - -type Geometry geojson.Geometry - -func (g Geometry) Value() (driver.Value, error) { - og := geojson.Geometry(g) - return wkb.Value(og.Geometry()), nil -} - -func (g *Geometry) Scan(src interface{}) error { - var p orb.Point - if err := wkb.Scanner(&p).Scan(src); err != nil { - return err - } - *g = Geometry(*geojson.NewGeometry(p)) - return nil -} - -func (g Geometry) MarshalJSON() ([]byte, error) { - gj := geojson.Geometry(g) - return gj.MarshalJSON() -} - -func (g *Geometry) UnmarshalJSON(data []byte) error { - gj, err := geojson.UnmarshalGeometry(data) - if err != nil { - return err - } - if gj == nil { - return fmt.Errorf("unable to unmarshal: geojson geometry is nil") - } - *g = Geometry(*gj) - return nil -} - -type InstrumentIDName struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` -} - -const listInstrumentsSQL = ` - SELECT - id, - deleted, - status_id, - status, - status_time, - slug, - name, - type_id, - type, - icon, - 
geometry, - station, - station_offset, - creator, - create_date, - updater, - update_date, - projects, - constants, - groups, - alert_configs, - nid_id, - usgs_id, - has_cwms, - show_cwms_tab, - opts - FROM v_instrument -` - -const listInstruments = listInstrumentsSQL + ` - WHERE NOT deleted -` - -// ListInstruments returns an array of instruments from the database -func (q *Queries) ListInstruments(ctx context.Context) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listInstruments); err != nil { - return nil, err - } - return ii, nil -} - -const getInstrument = listInstrumentsSQL + ` - WHERE id = $1 -` - -// GetInstrument returns a single instrument -func (q *Queries) GetInstrument(ctx context.Context, instrumentID uuid.UUID) (Instrument, error) { - var i Instrument - err := q.db.GetContext(ctx, &i, getInstrument, instrumentID) - return i, err -} - -const getInstrumentCount = ` - SELECT COUNT(*) FROM instrument WHERE NOT deleted -` - -// GetInstrumentCount returns the number of instruments in the database -func (q *Queries) GetInstrumentCount(ctx context.Context) (InstrumentCount, error) { - var ic InstrumentCount - if err := q.db.GetContext(ctx, &ic.InstrumentCount, getInstrumentCount); err != nil { - return ic, err - } - return ic, nil -} - -const createInstrument = ` - INSERT INTO instrument (slug, name, type_id, geometry, station, station_offset, creator, create_date, nid_id, usgs_id, show_cwms_tab) - VALUES (slugify($1, 'instrument'), $1, $2, ST_SetSRID(ST_GeomFromWKB($3), 4326), $4, $5, $6, $7, $8, $9, $10) - RETURNING id, slug -` - -func (q *Queries) CreateInstrument(ctx context.Context, i Instrument) (IDSlugName, error) { - var aa IDSlugName - if err := q.db.GetContext( - ctx, &aa, createInstrument, - i.Name, i.TypeID, i.Geometry, i.Station, i.StationOffset, i.CreatorID, i.CreateDate, i.NIDID, i.USGSID, i.ShowCwmsTab, - ); err != nil { - return aa, err - } - return aa, nil -} - -const listAdminProjects = ` - 
SELECT pr.project_id FROM profile_project_roles pr - INNER JOIN role ro ON ro.id = pr.role_id - WHERE pr.profile_id = $1 - AND ro.name = 'ADMIN' -` - -func (q *Queries) ListAdminProjects(ctx context.Context, profileID uuid.UUID) ([]uuid.UUID, error) { - projectIDs := make([]uuid.UUID, 0) - err := q.db.SelectContext(ctx, &projectIDs, listAdminProjects, profileID) - return projectIDs, err -} - -const listInstrumentProjects = ` - SELECT project_id FROM project_instrument WHERE instrument_id = $1 -` - -func (q *Queries) ListInstrumentProjects(ctx context.Context, instrumentID uuid.UUID) ([]uuid.UUID, error) { - projectIDs := make([]uuid.UUID, 0) - err := q.db.SelectContext(ctx, &projectIDs, listInstrumentProjects, instrumentID) - return projectIDs, err -} - -const getProjectCountForInstrument = ` - SELECT pi.instrument_id, i.name AS instrument_name, COUNT(pi.*) AS project_count - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id IN (?) - GROUP BY pi.instrument_id, i.name - ORDER BY i.name -` - -func (q *Queries) GetProjectCountForInstruments(ctx context.Context, instrumentIDs []uuid.UUID) ([]InstrumentsProjectCount, error) { - counts := make([]InstrumentsProjectCount, 0) - err := q.db.SelectContext(ctx, &counts, getProjectCountForInstrument, instrumentIDs) - return counts, err -} - -const updateInstrument = ` - UPDATE instrument SET - name = $3, - type_id = $4, - geometry = ST_GeomFromWKB($5), - updater = $6, - update_date = $7, - station = $8, - station_offset = $9, - nid_id = $10, - usgs_id = $11, - show_cwms_tab = $12 - WHERE id = $2 - AND id IN ( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) -` - -func (q *Queries) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i Instrument) error { - _, err := q.db.ExecContext( - ctx, updateInstrument, - projectID, i.ID, i.Name, i.TypeID, i.Geometry, - i.UpdaterID, i.UpdateDate, i.Station, i.StationOffset, i.NIDID, i.USGSID, 
i.ShowCwmsTab, - ) - return err -} - -const updateInstrumentGeometry = ` - UPDATE instrument SET - geometry = ST_GeomFromWKB($3), - updater = $4, - update_date = NOW() - WHERE id = $2 - AND id IN ( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - RETURNING id -` - -// UpdateInstrumentGeometry updates instrument geometry property -func (q *Queries) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p Profile) error { - _, err := q.db.ExecContext(ctx, updateInstrumentGeometry, projectID, instrumentID, wkb.Value(geom.Geometry()), p.ID) - return err -} - -const deleteFlagInstrument = ` - UPDATE instrument SET deleted = true - WHERE id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND id = $2 -` - -// DeleteFlagInstrument changes delete flag to true -func (q *Queries) DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteFlagInstrument, projectID, instrumentID) - return err -} - -const listInstrumentIDNamesByIDs = ` - SELECT id, name - FROM instrument - WHERE id IN (?) - AND NOT deleted -` - -func (q *Queries) ListInstrumentIDNamesByIDs(ctx context.Context, instrumentIDs []uuid.UUID) ([]InstrumentIDName, error) { - query, args, err := sqlIn(listInstrumentIDNamesByIDs, instrumentIDs) - if err != nil { - return nil, err - } - ii := make([]InstrumentIDName, 0) - err = q.db.SelectContext(ctx, &ii, q.db.Rebind(query), args...) 
- return ii, err -} diff --git a/api/internal/model/instrument_assign.go b/api/internal/model/instrument_assign.go deleted file mode 100644 index d4c7e37c..00000000 --- a/api/internal/model/instrument_assign.go +++ /dev/null @@ -1,220 +0,0 @@ -package model - -import ( - "context" - "fmt" - - "github.com/google/uuid" -) - -type ReasonCode int - -const ( - None ReasonCode = iota - Unauthorized - InvalidName - InvalidUnassign -) - -type InstrumentsValidation struct { - ReasonCode ReasonCode `json:"-"` - IsValid bool `json:"is_valid"` - Errors []string `json:"errors"` -} - -type ProjectInstrumentAssignments struct { - InstrumentIDs []uuid.UUID `json:"instrument_ids"` -} - -type InstrumentProjectAssignments struct { - ProjectIDs []uuid.UUID `json:"project_ids"` -} - -const assignInstrumentToProject = ` - INSERT INTO project_instrument (project_id, instrument_id) VALUES ($1, $2) - ON CONFLICT ON CONSTRAINT project_instrument_project_id_instrument_id_key DO NOTHING -` - -func (q *Queries) AssignInstrumentToProject(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignInstrumentToProject, projectID, instrumentID) - return err -} - -const unassignInstrumentFromProject = ` - DELETE FROM project_instrument WHERE project_id = $1 AND instrument_id = $2 -` - -func (q *Queries) UnassignInstrumentFromProject(ctx context.Context, projectID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignInstrumentFromProject, projectID, instrumentID) - return err -} - -const validateInstrumentNamesProjectUnique = ` - SELECT i.name - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.project_id = ? - AND i.name IN (?) 
- AND NOT i.deleted -` - -// ValidateInstrumentNamesProjectUnique checks that the provided instrument names do not already belong to a project -func (q *Queries) ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateInstrumentNamesProjectUnique, projectID, instrumentNames) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = InvalidName - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, nil -} - -const validateProjectsInstrumentNameUnique = ` - SELECT p.name, i.name - FROM project_instrument pi - INNER JOIN instrument i ON pi.instrument_id = i.id - INNER JOIN project p ON pi.project_id = p.id - WHERE i.name = ? - AND pi.instrument_id IN (?) 
- AND NOT i.deleted - ORDER BY pi.project_id -` - -// ValidateProjectsInstrumentNameUnique checks that the provided instrument name does not already belong to one of the provided projects -func (q *Queries) ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateProjectsInstrumentNameUnique, instrumentName, projectIDs) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Instrument name '%s' is already taken. Instrument names must be unique within associated projects", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = InvalidName - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, nil -} - -// case where service provides slice of instrument ids for single project -const validateInstrumentsAssignerAuthorized = ` - SELECT p.name AS project_name, i.name AS instrument_name - FROM project_instrument pi - INNER JOIN project p ON pi.project_id = p.id - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id IN (?) - AND NOT EXISTS ( - SELECT 1 FROM v_profile_project_roles ppr - WHERE ppr.profile_id = ? 
- AND (ppr.is_admin OR (ppr.project_id = pi.project_id AND ppr.role = 'ADMIN')) - ) - AND NOT i.deleted -` - -func (q *Queries) ValidateInstrumentsAssignerAuthorized(ctx context.Context, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateInstrumentsAssignerAuthorized, instrumentIDs, profileID) - if err != nil { - return v, err - } - var nn []struct { - ProjectName string `db:"project_name"` - InstrumentName string `db:"instrument_name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument '%s' because is assigned to another project '%s' which the user is not an ADMIN of", - nn[idx].InstrumentName, nn[idx].ProjectName, - ) - } - v.Errors = vErrors - v.ReasonCode = Unauthorized - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, err -} - -// case where service provides slice of project ids for single instrument -const validateProjectsAssignerAuthorized = ` - SELECT p.name - FROM project_instrument pi - INNER JOIN project p ON pi.project_id = p.id - INNER JOIN instrument i ON pi.instrument_id = i.id - WHERE pi.instrument_id = ? - AND pi.project_id IN (?) - AND NOT EXISTS ( - SELECT 1 FROM v_profile_project_roles ppr - WHERE profile_id = ? 
AND (ppr.is_admin OR (ppr.project_id = pi.project_id AND ppr.role = 'ADMIN')) - ) - AND NOT i.deleted - ORDER BY p.name -` - -func (q *Queries) ValidateProjectsAssignerAuthorized(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { - var v InstrumentsValidation - query, args, err := sqlIn(validateProjectsAssignerAuthorized, instrumentID, projectIDs, profileID) - if err != nil { - return v, err - } - var nn []struct { - Name string `db:"name"` - } - if err := q.db.SelectContext(ctx, &nn, q.db.Rebind(query), args...); err != nil { - return v, err - } - if len(nn) != 0 { - vErrors := make([]string, len(nn)) - for idx := range nn { - vErrors[idx] = fmt.Sprintf( - "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", - nn[idx].Name, - ) - } - v.Errors = vErrors - v.ReasonCode = Unauthorized - } else { - v.IsValid = true - v.Errors = make([]string, 0) - } - return v, err -} diff --git a/api/internal/model/instrument_constant.go b/api/internal/model/instrument_constant.go deleted file mode 100644 index dbc4fbcc..00000000 --- a/api/internal/model/instrument_constant.go +++ /dev/null @@ -1,40 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -const listInstrumentConstants = ` - SELECT t.* FROM v_timeseries t - INNER JOIN instrument_constants ic ON ic.timeseries_id = t.id - WHERE ic.instrument_id = $1 -` - -// ListInstrumentConstants lists constants for a given instrument id -func (q *Queries) ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listInstrumentConstants, instrumentID); err != nil { - return tt, err - } - return tt, nil -} - -const createInstrumentConstant = ` - INSERT INTO instrument_constants (instrument_id, timeseries_id) VALUES ($1, $2) -` - -func (q *Queries) CreateInstrumentConstant(ctx context.Context, instrumentID, 
timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createInstrumentConstant, instrumentID, timeseriesID) - return err -} - -const deleteInstrumentConstant = ` - DELETE FROM instrument_constants WHERE instrument_id = $1 AND timeseries_id = $2 -` - -func (q *Queries) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentConstant, instrumentID, timeseriesID) - return err -} diff --git a/api/internal/model/instrument_group.go b/api/internal/model/instrument_group.go deleted file mode 100644 index cbd257ee..00000000 --- a/api/internal/model/instrument_group.go +++ /dev/null @@ -1,182 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentGroup holds information for entity instrument_group -type InstrumentGroup struct { - ID uuid.UUID `json:"id"` - Deleted bool `json:"-"` - Slug string `json:"slug"` - Name string `json:"name"` - Description string `json:"description"` - ProjectID *uuid.UUID `json:"project_id" db:"project_id"` - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - TimeseriesCount int `json:"timeseries_count" db:"timeseries_count"` - AuditInfo -} - -// InstrumentGroupCollection is a collection of Instrument items -type InstrumentGroupCollection struct { - Items []InstrumentGroup -} - -// Shorten returns an instrument collection with individual objects limited to ID and Struct fields -func (c InstrumentGroupCollection) Shorten() IDSlugCollection { - ss := IDSlugCollection{Items: make([]IDSlug, 0)} - for _, n := range c.Items { - s := IDSlug{ID: n.ID, Slug: n.Slug} - ss.Items = append(ss.Items, s) - } - return ss -} - -// UnmarshalJSON implements UnmarshalJSON interface -// Allows unpacking object or array of objects into array of objects -func (c *InstrumentGroupCollection) UnmarshalJSON(b []byte) error { - switch 
util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var g InstrumentGroup - if err := json.Unmarshal(b, &g); err != nil { - return err - } - c.Items = []InstrumentGroup{g} - default: - c.Items = make([]InstrumentGroup, 0) - } - return nil -} - -const listInstrumentGroupsSQL = ` - SELECT - id, - slug, - name, - description, - creator, - create_date, - updater, - update_date, - project_id, - instrument_count, - timeseries_count - FROM v_instrument_group -` - -const listInstrumentGroups = listInstrumentGroupsSQL + ` - WHERE NOT deleted -` - -// ListInstrumentGroups returns a list of instrument groups -func (q *Queries) ListInstrumentGroups(ctx context.Context) ([]InstrumentGroup, error) { - gg := make([]InstrumentGroup, 0) - if err := q.db.SelectContext(ctx, &gg, listInstrumentGroups); err != nil { - return make([]InstrumentGroup, 0), err - } - return gg, nil -} - -const getInstrumentGroup = listInstrumentGroupsSQL + ` - WHERE id = $1 -` - -// GetInstrumentGroup returns a single instrument group -func (q *Queries) GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (InstrumentGroup, error) { - var g InstrumentGroup - if err := q.db.GetContext(ctx, &g, getInstrumentGroup, instrumentGroupID); err != nil { - return g, err - } - return g, nil -} - -const createInstrumentGroup = ` - INSERT INTO instrument_group (slug, name, description, creator, create_date, project_id) - VALUES (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) - RETURNING id, slug, name, description, creator, create_date, updater, update_date, project_id -` - -func (q *Queries) CreateInstrumentGroup(ctx context.Context, group InstrumentGroup) (InstrumentGroup, error) { - var groupNew InstrumentGroup - err := q.db.GetContext( - ctx, &groupNew, createInstrumentGroup, - group.Name, group.Description, group.CreatorID, group.CreateDate, group.ProjectID, - ) - return groupNew, err -} - -const updateInstrumentGroup = ` 
- UPDATE instrument_group SET - name = $2, - deleted = $3, - description = $4, - updater = $5, - update_date = $6, - project_id = $7 - WHERE id = $1 - RETURNING * -` - -// UpdateInstrumentGroup updates an instrument group -func (q *Queries) UpdateInstrumentGroup(ctx context.Context, group InstrumentGroup) (InstrumentGroup, error) { - var groupUpdated InstrumentGroup - err := q.db.GetContext( - ctx, &groupUpdated, updateInstrumentGroup, - group.ID, group.Name, group.Deleted, group.Description, group.UpdaterID, group.UpdateDate, group.ProjectID, - ) - return groupUpdated, err -} - -const deleteFlagInstrumentGroup = ` - UPDATE instrument_group SET deleted = true WHERE id = $1 -` - -// DeleteFlagInstrumentGroup sets the deleted field to true -func (q *Queries) DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteFlagInstrumentGroup, instrumentGroupID) - return err -} - -const listInstrumentGroupInstruments = ` - SELECT inst.* - FROM instrument_group_instruments igi - INNER JOIN (` + listInstrumentsSQL + `) inst ON igi.instrument_id = inst.id - WHERE igi.instrument_group_id = $1 and inst.deleted = false -` - -// ListInstrumentGroupInstruments returns a list of instrument group instruments for a given instrument -func (q *Queries) ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listInstrumentGroupInstruments, groupID); err != nil { - return nil, err - } - return ii, nil -} - -const createInstrumentGroupInstruments = ` - INSERT INTO instrument_group_instruments (instrument_group_id, instrument_id) VALUES ($1, $2) -` - -// CreateInstrumentGroupInstruments adds an instrument to an instrument group -func (q *Queries) CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
createInstrumentGroupInstruments, instrumentGroupID, instrumentID) - return err -} - -const deleteInstrumentGroupInstruments = ` - DELETE FROM instrument_group_instruments WHERE instrument_group_id = $1 and instrument_id = $2 -` - -// DeleteInstrumentGroupInstruments adds an instrument to an instrument group -func (q *Queries) DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentGroupInstruments, instrumentGroupID, instrumentID) - return err -} diff --git a/api/internal/model/instrument_ipi.go b/api/internal/model/instrument_ipi.go deleted file mode 100644 index d20f5481..00000000 --- a/api/internal/model/instrument_ipi.go +++ /dev/null @@ -1,141 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type IpiOpts struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - NumSegments int `json:"num_segments" db:"num_segments"` - BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" db:"bottom_elevation_timeseries_id"` - BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` - InitialTime *time.Time `json:"initial_time" db:"initial_time"` -} - -type IpiSegment struct { - ID int `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Length *float64 `json:"length" db:"length"` - LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` - TiltTimeseriesID *uuid.UUID `json:"tilt_timeseries_id" db:"tilt_timeseries_id"` - IncDevTimeseriesID *uuid.UUID `json:"inc_dev_timeseries_id" db:"inc_dev_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` -} - -type IpiMeasurements struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - Time time.Time `json:"time" db:"time"` - Measurements dbJSONSlice[IpiSegmentMeasurement] `json:"measurements" db:"measurements"` -} - -type 
IpiSegmentMeasurement struct { - SegmentID int `json:"segment_id" db:"segment_id"` - Tilt *float64 `json:"tilt" db:"tilt"` - IncDev *float64 `json:"inc_dev" db:"inc_dev"` - CumDev *float64 `json:"cum_dev" db:"cum_dev"` - Temp *float64 `json:"temp" db:"temp"` - Elelvation *float64 `json:"elevation" db:"elevation"` -} - -var ( - IpiParameterID = uuid.MustParse("a9a5ad45-b2e5-4744-816e-d3184f2c08bd") -) - -// TODO: when creating new timeseries, any depth based instruments should not be available for assignment - -const createIpiOpts = ` - INSERT INTO ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) - VALUES ($1, $2, $3, $4) -` - -func (q *Queries) CreateIpiOpts(ctx context.Context, instrumentID uuid.UUID, si IpiOpts) error { - _, err := q.db.ExecContext(ctx, createIpiOpts, instrumentID, si.NumSegments, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const updateIpiOpts = ` - UPDATE ipi_opts SET - bottom_elevation_timeseries_id = $2, - initial_time = $3 - WHERE instrument_id = $1 -` - -func (q *Queries) UpdateIpiOpts(ctx context.Context, instrumentID uuid.UUID, si IpiOpts) error { - _, err := q.db.ExecContext(ctx, updateIpiOpts, instrumentID, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const getAllIpiSegmentsForInstrument = ` - SELECT * FROM v_ipi_segment WHERE instrument_id = $1 -` - -func (q *Queries) GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]IpiSegment, error) { - ssi := make([]IpiSegment, 0) - err := q.db.SelectContext(ctx, &ssi, getAllIpiSegmentsForInstrument, instrumentID) - return ssi, err -} - -const createIpiSegment = ` - INSERT INTO ipi_segment ( - id, - instrument_id, - length_timeseries_id, - tilt_timeseries_id, - inc_dev_timeseries_id, - temp_timeseries_id - ) VALUES ($1, $2, $3, $4, $5, $6) -` - -func (q *Queries) CreateIpiSegment(ctx context.Context, seg IpiSegment) error { - _, err := q.db.ExecContext(ctx, createIpiSegment, - seg.ID, - 
seg.InstrumentID, - seg.LengthTimeseriesID, - seg.TiltTimeseriesID, - seg.IncDevTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const updateIpiSegment = ` - UPDATE ipi_segment SET - length_timeseries_id = $3, - tilt_timeseries_id = $4, - inc_dev_timeseries_id = $5, - temp_timeseries_id = $6 - WHERE id = $1 AND instrument_id = $2 -` - -func (q *Queries) UpdateIpiSegment(ctx context.Context, seg IpiSegment) error { - _, err := q.db.ExecContext(ctx, updateIpiSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - seg.TiltTimeseriesID, - seg.IncDevTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const getIpiMeasurementsForInstrument = ` - SELECT instrument_id, time, measurements - FROM v_ipi_measurement - WHERE instrument_id = $1 AND time >= $2 AND time <= $3 - UNION - SELECT instrument_id, time, measurements - FROM v_ipi_measurement - WHERE time IN (SELECT initial_time FROM ipi_opts WHERE instrument_id = $1) - AND instrument_id = $1 - ORDER BY time ASC -` - -func (q *Queries) GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw TimeWindow) ([]IpiMeasurements, error) { - mm := make([]IpiMeasurements, 0) - err := q.db.SelectContext(ctx, &mm, getIpiMeasurementsForInstrument, instrumentID, tw.After, tw.Before) - return mm, err -} diff --git a/api/internal/model/instrument_note.go b/api/internal/model/instrument_note.go deleted file mode 100644 index 582ff1f7..00000000 --- a/api/internal/model/instrument_note.go +++ /dev/null @@ -1,134 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentNote is a note about an instrument -type InstrumentNote struct { - ID uuid.UUID `json:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Title string `json:"title"` - Body string `json:"body"` - Time time.Time `json:"time"` - AuditInfo -} - -// InstrumentNoteCollection is a 
collection of Instrument Notes -type InstrumentNoteCollection struct { - Items []InstrumentNote -} - -// UnmarshalJSON implements UnmarshalJSON interface -// Allows unpacking object or array of objects into array of objects -func (c *InstrumentNoteCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var n InstrumentNote - if err := json.Unmarshal(b, &n); err != nil { - return err - } - c.Items = []InstrumentNote{n} - default: - c.Items = make([]InstrumentNote, 0) - } - return nil -} - -const listInstrumentNotes = ` - SELECT - N.id AS id, - N.instrument_id AS instrument_id, - N.title, - N.body, - N.time, - N.creator, - N.create_date, - N.updater, - N.update_date - FROM instrument_note N -` - -// ListInstrumentNotes returns an array of instruments from the database -func (q *Queries) ListInstrumentNotes(ctx context.Context) ([]InstrumentNote, error) { - nn := make([]InstrumentNote, 0) - if err := q.db.SelectContext(ctx, &nn, listInstrumentNotes); err != nil { - return nil, err - } - return nn, nil -} - -const listInstrumentInstrumentNotes = listInstrumentNotes + ` - WHERE N.instrument_id = $1 -` - -// ListInstrumentInstrumentNotes returns an array of instrument notes for a given instrument -func (q *Queries) ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentNote, error) { - nn := make([]InstrumentNote, 0) - if err := q.db.SelectContext(ctx, &nn, listInstrumentInstrumentNotes, instrumentID); err != nil { - return nil, err - } - return nn, nil -} - -const getInstrumentNotes = listInstrumentNotes + ` - WHERE N.id = $1 -` - -// GetInstrumentNote returns a single instrument note -func (q *Queries) GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (InstrumentNote, error) { - var n InstrumentNote - if err := q.db.GetContext(ctx, &n, getInstrumentNotes, noteID); err != nil { - return n, err - } - return n, 
nil -} - -const createInstrumentNote = ` - INSERT INTO instrument_note (instrument_id, title, body, time, creator, create_date) - VALUES ($1, $2, $3, $4, $5, $6) - RETURNING id, instrument_id, title, body, time, creator, create_date, updater, update_date -` - -func (q *Queries) CreateInstrumentNote(ctx context.Context, note InstrumentNote) (InstrumentNote, error) { - var noteNew InstrumentNote - err := q.db.GetContext(ctx, ¬eNew, createInstrumentNote, note.InstrumentID, note.Title, note.Body, note.Time, note.CreatorID, note.CreateDate) - return noteNew, err -} - -const updateInstrumentNote = ` - UPDATE instrument_note SET - title = $2, - body = $3, - time = $4, - updater = $5, - update_date = $6 - WHERE id = $1 - RETURNING id, instrument_id, title, body, time, creator, create_date, updater, update_date -` - -// UpdateInstrumentNote updates a single instrument note -func (q *Queries) UpdateInstrumentNote(ctx context.Context, n InstrumentNote) (InstrumentNote, error) { - var nUpdated InstrumentNote - err := q.db.GetContext(ctx, &nUpdated, updateInstrumentNote, n.ID, n.Title, n.Body, n.Time, n.UpdaterID, n.UpdateDate) - return nUpdated, err -} - -const deleteInstrumentNote = ` - DELETE FROM instrument_note WHERE id = $1 -` - -// DeleteInstrumentNote deletes an instrument note -func (q *Queries) DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentNote, noteID) - return err -} diff --git a/api/internal/model/instrument_saa.go b/api/internal/model/instrument_saa.go deleted file mode 100644 index b461670a..00000000 --- a/api/internal/model/instrument_saa.go +++ /dev/null @@ -1,154 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type SaaOpts struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - NumSegments int `json:"num_segments" db:"num_segments"` - BottomElevationTimeseriesID uuid.UUID `json:"bottom_elevation_timeseries_id" 
db:"bottom_elevation_timeseries_id"` - BottomElevation float64 `json:"bottom_elevation" db:"bottom_elevation"` - InitialTime *time.Time `json:"initial_time" db:"initial_time"` -} - -type SaaSegment struct { - ID int `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Length *float64 `json:"length" db:"length"` - LengthTimeseriesID uuid.UUID `json:"length_timeseries_id" db:"length_timeseries_id"` - XTimeseriesID *uuid.UUID `json:"x_timeseries_id" db:"x_timeseries_id"` - YTimeseriesID *uuid.UUID `json:"y_timeseries_id" db:"y_timeseries_id"` - ZTimeseriesID *uuid.UUID `json:"z_timeseries_id" db:"z_timeseries_id"` - TempTimeseriesID *uuid.UUID `json:"temp_timeseries_id" db:"temp_timeseries_id"` -} - -type SaaMeasurements struct { - InstrumentID uuid.UUID `json:"-" db:"instrument_id"` - Time time.Time `json:"time" db:"time"` - Measurements dbJSONSlice[SaaSegmentMeasurement] `json:"measurements" db:"measurements"` -} - -type SaaSegmentMeasurement struct { - SegmentID int `json:"segment_id" db:"segment_id"` - X *float64 `json:"x" db:"x"` - Y *float64 `json:"y" db:"y"` - Z *float64 `json:"z" db:"z"` - Temp *float64 `json:"temp" db:"temp"` - XIncrement *float64 `json:"x_increment" db:"x_increment"` - YIncrement *float64 `json:"y_increment" db:"y_increment"` - ZIncrement *float64 `json:"z_increment" db:"z_increment"` - TempIncrement *float64 `json:"temp_increment" db:"temp_increment"` - XCumDev *float64 `json:"x_cum_dev" db:"x_cum_dev"` - YCumDev *float64 `json:"y_cum_dev" db:"y_cum_dev"` - ZCumDev *float64 `json:"z_cum_dev" db:"z_cum_dev"` - TempCumDev *float64 `json:"temp_cum_dev" db:"temp_cum_dev"` - Elevation *float64 `json:"elevation" db:"elevation"` -} - -var ( - SaaParameterID = uuid.MustParse("6d12ca4c-b618-41cd-87a2-a248980a0d69") -) - -// TODO: when creating new timeseries, any depth based instruments should not be available for assignment - -const createSaaOpts = ` - INSERT INTO saa_opts (instrument_id, num_segments, 
bottom_elevation_timeseries_id, initial_time) - VALUES ($1, $2, $3, $4) -` - -func (q *Queries) CreateSaaOpts(ctx context.Context, instrumentID uuid.UUID, si SaaOpts) error { - _, err := q.db.ExecContext(ctx, createSaaOpts, instrumentID, si.NumSegments, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const updateSaaOpts = ` - UPDATE saa_opts SET - bottom_elevation_timeseries_id = $2, - initial_time = $3 - WHERE instrument_id = $1 -` - -func (q *Queries) UpdateSaaOpts(ctx context.Context, instrumentID uuid.UUID, si SaaOpts) error { - _, err := q.db.ExecContext(ctx, updateSaaOpts, instrumentID, si.BottomElevationTimeseriesID, si.InitialTime) - return err -} - -const getAllSaaSegmentsForInstrument = ` - SELECT * FROM v_saa_segment WHERE instrument_id = $1 -` - -func (q *Queries) GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]SaaSegment, error) { - ssi := make([]SaaSegment, 0) - err := q.db.SelectContext(ctx, &ssi, getAllSaaSegmentsForInstrument, instrumentID) - return ssi, err -} - -const createSaaSegment = ` - INSERT INTO saa_segment ( - id, - instrument_id, - length_timeseries_id, - x_timeseries_id, - y_timeseries_id, - z_timeseries_id, - temp_timeseries_id - ) VALUES ($1, $2, $3, $4, $5, $6, $7) -` - -func (q *Queries) CreateSaaSegment(ctx context.Context, seg SaaSegment) error { - _, err := q.db.ExecContext(ctx, createSaaSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - seg.XTimeseriesID, - seg.YTimeseriesID, - seg.ZTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const updateSaaSegment = ` - UPDATE saa_segment SET - length_timeseries_id = $3, - x_timeseries_id = $4, - y_timeseries_id = $5, - z_timeseries_id = $6, - temp_timeseries_id = $7 - WHERE id = $1 AND instrument_id = $2 -` - -func (q *Queries) UpdateSaaSegment(ctx context.Context, seg SaaSegment) error { - _, err := q.db.ExecContext(ctx, updateSaaSegment, - seg.ID, - seg.InstrumentID, - seg.LengthTimeseriesID, - 
seg.XTimeseriesID, - seg.YTimeseriesID, - seg.ZTimeseriesID, - seg.TempTimeseriesID, - ) - return err -} - -const getSaaMeasurementsForInstrument = ` - SELECT instrument_id, time, measurements - FROM v_saa_measurement - WHERE instrument_id = $1 AND time >= $2 AND time <= $3 - UNION - SELECT instrument_id, time, measurements - FROM v_saa_measurement - WHERE time IN (SELECT initial_time FROM saa_opts WHERE instrument_id = $1) - AND instrument_id = $1 - ORDER BY time ASC -` - -func (q *Queries) GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw TimeWindow) ([]SaaMeasurements, error) { - mm := make([]SaaMeasurements, 0) - err := q.db.SelectContext(ctx, &mm, getSaaMeasurementsForInstrument, instrumentID, tw.After, tw.Before) - return mm, err -} diff --git a/api/internal/model/instrument_status.go b/api/internal/model/instrument_status.go deleted file mode 100644 index 9e260d45..00000000 --- a/api/internal/model/instrument_status.go +++ /dev/null @@ -1,100 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// InstrumentStatus is an instrument status -type InstrumentStatus struct { - ID uuid.UUID `json:"id"` - Time time.Time `json:"time"` - StatusID uuid.UUID `json:"status_id" db:"status_id"` - Status string `json:"status"` -} - -// InstrumentStatusCollection is a collection of instrument status -type InstrumentStatusCollection struct { - Items []InstrumentStatus -} - -// UnmarshalJSON implements the UnmarshalJSONinterface -func (c *InstrumentStatusCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var s InstrumentStatus - if err := json.Unmarshal(b, &s); err != nil { - return err - } - c.Items = []InstrumentStatus{s} - default: - c.Items = make([]InstrumentStatus, 0) - } - return nil -} - -const 
listInstrumentStatusSQL = ` - SELECT - S.id, - S.status_id, - D.name AS status, - S.time - FROM instrument_status S - INNER JOIN status D - ON D.id = S.status_id -` - -const listInstrumentStatus = listInstrumentStatusSQL + ` - WHERE S.instrument_id = $1 ORDER BY time DESC -` - -// ListInstrumentStatus returns all status values for an instrument -func (q *Queries) ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]InstrumentStatus, error) { - ss := make([]InstrumentStatus, 0) - if err := q.db.SelectContext(ctx, &ss, listInstrumentStatus, instrumentID); err != nil { - return nil, err - } - return ss, nil -} - -const getInstrumentStatus = listInstrumentStatusSQL + ` - WHERE S.id = $1 -` - -// GetInstrumentStatus gets a single status -func (q *Queries) GetInstrumentStatus(ctx context.Context, statusID uuid.UUID) (InstrumentStatus, error) { - var s InstrumentStatus - if err := q.db.GetContext(ctx, &s, getInstrumentStatus, statusID); err != nil { - return s, err - } - return s, nil -} - -const createOrUpdateInstrumentStatus = ` - INSERT INTO instrument_status (instrument_id, status_id, time) VALUES ($1, $2, $3) - ON CONFLICT ON CONSTRAINT instrument_unique_status_in_time DO UPDATE SET status_id = EXCLUDED.status_id -` - -// CreateOrUpdateInstrumentStatus creates a Instrument Status, updates value on conflict -func (q *Queries) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID, statusID uuid.UUID, statusTime time.Time) error { - _, err := q.db.ExecContext(ctx, createOrUpdateInstrumentStatus, instrumentID, statusID, statusTime) - return err -} - -const deleteInstrumentStatus = ` - DELETE FROM instrument_status WHERE id = $1 -` - -// DeleteInstrumentStatus deletes a status for an instrument -func (q *Queries) DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteInstrumentStatus, statusID) - return err -} diff --git a/api/internal/model/measurement.go 
b/api/internal/model/measurement.go deleted file mode 100644 index 18029e7e..00000000 --- a/api/internal/model/measurement.go +++ /dev/null @@ -1,344 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "math" - "strings" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -// TimeseriesMeasurementCollectionCollection is a collection of timeseries measurement collections -// i.e an array of structs, each containing timeseries measurements not necessarily from the same time series -type TimeseriesMeasurementCollectionCollection struct { - Items []MeasurementCollection -} - -// TimeseriesIDs returns a slice of all timeseries IDs contained in the MeasurementCollectionCollection -func (cc *TimeseriesMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { - dd := make(map[uuid.UUID]struct{}) - for _, item := range cc.Items { - dd[item.TimeseriesID] = struct{}{} - } - return dd -} - -// UnmarshalJSON implements UnmarshalJSON interface -func (cc *TimeseriesMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &cc.Items); err != nil { - return err - } - case "OBJECT": - var mc MeasurementCollection - if err := json.Unmarshal(b, &mc); err != nil { - return err - } - cc.Items = []MeasurementCollection{mc} - default: - cc.Items = make([]MeasurementCollection, 0) - } - return nil -} - -// Measurement is a time and value associated with a timeseries -type Measurement struct { - TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` - Time time.Time `json:"time"` - Value FloatNanInf `json:"value"` - Error string `json:"error,omitempty"` - TimeseriesNote -} - -type FloatNanInf float64 - -func (j FloatNanInf) MarshalJSON() ([]byte, error) { - if math.IsNaN(float64(j)) || math.IsInf(float64(j), 0) { - return []byte("null"), nil - } - - return []byte(fmt.Sprintf("%f", float64(j))), nil -} - -func (j *FloatNanInf) 
UnmarshalJSON(v []byte) error { - switch strings.ToLower(string(v)) { - case `"nan"`, "nan", "", "null", "undefined": - *j = FloatNanInf(math.NaN()) - case `"inf"`, "inf": - *j = FloatNanInf(math.Inf(1)) - default: - var fv float64 - if err := json.Unmarshal(v, &fv); err != nil { - *j = FloatNanInf(math.NaN()) - return nil - } - *j = FloatNanInf(fv) - } - return nil -} - -// MeasurementLean is the minimalist representation of a timeseries measurement -// a key value pair where key is the timestamp, value is the measurement { : } -type MeasurementLean map[time.Time]float64 - -// MeasurementCollection is a collection of timeseries measurements -type MeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []Measurement `json:"items"` -} - -// MeasurementCollectionLean uses a minimalist representation of a timeseries measurement -type MeasurementCollectionLean struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []MeasurementLean `json:"items"` -} - -type MeasurementGetter interface { - getTime() time.Time - getValue() float64 -} - -func (m Measurement) getTime() time.Time { - return m.Time -} - -func (m Measurement) getValue() float64 { - return float64(m.Value) -} - -// Should only ever be one -func (ml MeasurementLean) getTime() time.Time { - var t time.Time - for k := range ml { - t = k - } - return t -} - -// Should only ever be one -func (ml MeasurementLean) getValue() float64 { - var m float64 - for _, v := range ml { - m = v - } - return m -} - -const ( - createTimeseriesMeasurementSQL = ` - INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES ($1, $2, $3) - ` - createTimeseriesNoteSQL = ` - INSERT INTO timeseries_notes (timeseries_id, time, masked, validated, annotation) VALUES ($1, $2, $3, $4, $5) - ` -) - -const listTimeseriesMeasurements = ` - SELECT - m.timeseries_id, - m.time, - m.value, - n.masked, - n.validated, - n.annotation - FROM timeseries_measurement 
m - LEFT JOIN timeseries_notes n ON m.timeseries_id = n.timeseries_id AND m.time = n.time - INNER JOIN timeseries t ON t.id = m.timeseries_id - WHERE t.id = $1 AND m.time > $2 AND m.time < $3 ORDER BY m.time ASC -` - -// ListTimeseriesMeasurements returns a stored timeseries with slice of timeseries measurements populated -func (q *Queries) ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw TimeWindow, threshold int) (*MeasurementCollection, error) { - items := make([]Measurement, 0) - if err := q.db.SelectContext(ctx, &items, listTimeseriesMeasurements, timeseriesID, tw.After, tw.Before); err != nil { - return nil, err - } - return &MeasurementCollection{TimeseriesID: timeseriesID, Items: LTTB(items, threshold)}, nil -} - -const deleteTimeseriesMeasurements = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 and time = $2 -` - -// DeleteTimeserieMeasurements deletes a timeseries Measurement -func (q *Queries) DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurements, timeseriesID, time) - return err -} - -const getTimeseriesConstantMeasurement = ` - SELECT - M.timeseries_id, - M.time, - M.value - FROM timeseries_measurement M - INNER JOIN v_timeseries_stored T ON T.id = M.timeseries_id - INNER JOIN parameter P ON P.id = T.parameter_id - WHERE T.instrument_id IN ( - SELECT instrument_id - FROM v_timeseries_stored T - WHERE t.id= $1 - ) - AND P.name = $2 -` - -// GetTimeseriesConstantMeasurement returns a constant timeseries measurement for the same instrument by constant name -func (q *Queries) GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (Measurement, error) { - var m Measurement - ms := make([]Measurement, 0) - if err := q.db.Select(&ms, getTimeseriesConstantMeasurement, timeseriesID, constantName); err != nil { - return m, err - } - if len(ms) > 0 { - m = ms[0] - } - 
return m, nil -} - -const createTimeseriesMeasruement = createTimeseriesMeasurementSQL + ` - ON CONFLICT ON CONSTRAINT timeseries_unique_time DO NOTHING -` - -func (q *Queries) CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error { - _, err := q.db.ExecContext(ctx, createTimeseriesMeasruement, timeseriesID, t, value) - return err -} - -const createOrUpdateTimeseriesMeasurement = createTimeseriesMeasurementSQL + ` - ON CONFLICT ON CONSTRAINT timeseries_unique_time DO UPDATE SET value = EXCLUDED.value -` - -func (q *Queries) CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error { - _, err := q.db.ExecContext(ctx, createOrUpdateTimeseriesMeasurement, timeseriesID, t, value) - return err -} - -const createTimeseriesNote = createTimeseriesNoteSQL + ` - ON CONFLICT ON CONSTRAINT notes_unique_time DO NOTHING -` - -func (q *Queries) CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n TimeseriesNote) error { - _, err := q.db.ExecContext(ctx, createTimeseriesNote, timeseriesID, t, n.Masked, n.Validated, n.Annotation) - return err -} - -const createOrUpdateTimeseriesNote = createTimeseriesNoteSQL + ` - ON CONFLICT ON CONSTRAINT notes_unique_time DO UPDATE SET masked = EXCLUDED.masked, validated = EXCLUDED.validated, annotation = EXCLUDED.annotation -` - -func (q *Queries) CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n TimeseriesNote) error { - _, err := q.db.ExecContext(ctx, createOrUpdateTimeseriesNote, timeseriesID, t, n.Masked, n.Validated, n.Annotation) - return err -} - -const deleteTimeseriesMeasurement = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 AND time = $2 -` - -func (q *Queries) DeleteTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurementsRange, timeseriesID, t) - 
return err -} - -const deleteTimeseriesMeasurementsRange = ` - DELETE FROM timeseries_measurement WHERE timeseries_id = $1 AND time > $2 AND time < $3 -` - -func (q *Queries) DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesMeasurementsRange, timeseriesID, start, end) - return err -} - -const deleteTimeseriesNote = ` - DELETE FROM timeseries_notes WHERE timeseries_id = $1 AND time > $2 AND time < $3 -` - -func (q *Queries) DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error { - _, err := q.db.ExecContext(ctx, deleteTimeseriesNote, timeseriesID, start, end) - return err -} - -// A slightly modified LTTB (Largest-Triange-Three-Buckets) algorithm for downsampling timeseries measurements -// https://godoc.org/github.com/dgryski/go-lttb -func LTTB[T MeasurementGetter](data []T, threshold int) []T { - if threshold == 0 || threshold >= len(data) { - return data // Nothing to do - } - - if threshold < 3 { - threshold = 3 - } - - sampled := make([]T, 0, threshold) - - // Bucket size. 
Leave room for start and end data points - every := float64(len(data)-2) / float64(threshold-2) - - sampled = append(sampled, data[0]) // Always add the first point - - bucketStart := 1 - bucketCenter := int(math.Floor(every)) + 1 - - var a int - - for i := 0; i < threshold-2; i++ { - - bucketEnd := int(math.Floor(float64(i+2)*every)) + 1 - - // Calculate point average for next bucket (containing c) - avgRangeStart := bucketCenter - avgRangeEnd := bucketEnd - - if avgRangeEnd >= len(data) { - avgRangeEnd = len(data) - } - - avgRangeLength := float64(avgRangeEnd - avgRangeStart) - - var avgX, avgY float64 - for ; avgRangeStart < avgRangeEnd; avgRangeStart++ { - avgX += time.Duration(data[avgRangeStart].getTime().Unix()).Seconds() - avgY += data[avgRangeStart].getValue() - } - avgX /= avgRangeLength - avgY /= avgRangeLength - - // Get the range for this bucket - rangeOffs := bucketStart - rangeTo := bucketCenter - - // Point a - pointAX := time.Duration(data[a].getTime().UnixNano()).Seconds() - pointAY := data[a].getValue() - - maxArea := float64(-1.0) - - var nextA int - for ; rangeOffs < rangeTo; rangeOffs++ { - // Calculate triangle area over three buckets - area := (pointAX-avgX)*(data[rangeOffs].getValue()-pointAY) - (pointAX-time.Duration(data[rangeOffs].getTime().Unix()).Seconds())*(avgY-pointAY) - // We only care about the relative area here. 
- // Calling math.Abs() is slower than squaring - area *= area - if area > maxArea { - maxArea = area - nextA = rangeOffs // Next a is this b - } - } - - sampled = append(sampled, data[nextA]) // Pick this point from the bucket - a = nextA // This a is the next a (chosen b) - - bucketStart = bucketCenter - bucketCenter = bucketEnd - } - - sampled = append(sampled, data[len(data)-1]) // Always add last - - return sampled -} diff --git a/api/internal/model/measurement_inclinometer.go b/api/internal/model/measurement_inclinometer.go deleted file mode 100644 index 7f7727b1..00000000 --- a/api/internal/model/measurement_inclinometer.go +++ /dev/null @@ -1,213 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" - "github.com/jmoiron/sqlx/types" -) - -// Inclinometer Measurement is a time and values associated with a timeseries -type InclinometerMeasurement struct { - TimeseriesID uuid.UUID `json:"-" db:"timeseries_id"` - Time time.Time `json:"time"` - Values types.JSONText `json:"values"` - Creator uuid.UUID `json:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` -} - -// Values associated with a inclinometer measurement -type InclinometerMeasurementValues struct { - Depth int `json:"depth" db:"depth"` - A0 float32 `json:"a0" db:"a0"` - A180 float32 `json:"a180" db:"a180"` - B0 float32 `json:"b0" db:"b0"` - B180 float32 `json:"b180" db:"b180"` - AChecksum float32 `json:"aChecksum" db:"a_checksum"` - AComb float32 `json:"aComb" db:"a_comb"` - AIncrement float32 `json:"aIncrement" db:"a_increment"` - ACumDev float32 `json:"aCumDev" db:"a_cum_dev"` - BChecksum float32 `json:"bChecksum" db:"b_checksum"` - BComb float32 `json:"bComb" db:"b_comb"` - BIncrement float32 `json:"bIncrement" db:"b_increment"` - BCumDev float32 `json:"bCumDev" db:"b_cum_dev"` -} - -// InclinometerMeasurementLean is the minimalist representation of a timeseries 
measurement -// a key value pair where key is the timestamp, value is the measurement { : } -type InclinometerMeasurementLean map[time.Time]types.JSONText - -// InclinometerMeasurementCollection is a collection of Inclinometer measurements -type InclinometerMeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Inclinometers []InclinometerMeasurement `json:"inclinometers"` -} - -// InclinometerMeasurementCollectionLean uses a minimalist representation of a Inclinometer timeseries measurement -type InclinometerMeasurementCollectionLean struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []InclinometerMeasurementLean `json:"items"` -} - -// InclinometerMeasurementCollectionCollection is a collection of inclinometer measurement collections -// i.e an array of structs, each containing inclinometer measurements not necessarily from the same time series -type InclinometerMeasurementCollectionCollection struct { - Items []InclinometerMeasurementCollection -} - -// InclinometerTimeseriesIDs returns a slice of all timeseries IDs contained in the InclinometerMeasurementCollectionCollection -func (cc *InclinometerMeasurementCollectionCollection) TimeseriesIDs() map[uuid.UUID]struct{} { - dd := make(map[uuid.UUID]struct{}) - for _, item := range cc.Items { - dd[item.TimeseriesID] = struct{}{} - } - return dd -} - -// UnmarshalJSON implements UnmarshalJSON interface -func (cc *InclinometerMeasurementCollectionCollection) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &cc.Items); err != nil { - return err - } - case "OBJECT": - var mc InclinometerMeasurementCollection - if err := json.Unmarshal(b, &mc); err != nil { - return err - } - cc.Items = []InclinometerMeasurementCollection{mc} - default: - cc.Items = make([]InclinometerMeasurementCollection, 0) - } - return nil -} - -const listInclinometerMeasurements = ` - SELECT M.timeseries_id, - 
M.time, - M.creator, - M.create_date - FROM inclinometer_measurement M - INNER JOIN timeseries T - ON T.id = M.timeseries_id - WHERE T.id = $1 AND M.time > $2 AND M.time < $3 ORDER BY M.time DESC -` - -// ListInclinometersMeasurements returns a timeseries with slice of inclinometer measurements populated -func (q *Queries) ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw TimeWindow) (*InclinometerMeasurementCollection, error) { - mc := InclinometerMeasurementCollection{TimeseriesID: timeseriesID} - if err := q.db.SelectContext(ctx, &mc.Inclinometers, listInclinometerMeasurements, timeseriesID, tw.After, tw.Before); err != nil { - return nil, err - } - return &mc, nil -} - -func listInclinometerMeasurementsValues(inclinometerConstant string) string { - if inclinometerConstant == "0" { - return ` - select items.depth, - items.a0, - items.a180, - items.b0, - items.b180, - (items.a0 + items.a180) AS a_checksum, - (items.a0 - items.a180)/2 AS a_comb, - 0 AS a_increment, - 0 AS a_cum_dev, - (items.b0 + items.b180) AS b_checksum, - (items.b0 - items.b180)/2 AS b_comb, - 0 AS b_increment, - 0 AS b_cum_dev - from inclinometer_measurement, jsonb_to_recordset(inclinometer_measurement.values) as items(depth int, a0 real, a180 real, b0 real, b180 real) - ` - } else { - return fmt.Sprintf(` - select items.depth, - items.a0, - items.a180, - items.b0, - items.b180, - (items.a0 + items.a180) AS a_checksum, - (items.a0 - items.a180)/2 AS a_comb, - (items.a0 - items.a180) / 2 / %s * 24 AS a_increment, - SUM((items.a0 - items.a180) / 2 / %s * 24) OVER (ORDER BY depth desc) AS a_cum_dev, - (items.b0 + items.b180) AS b_checksum, - (items.b0 - items.b180)/2 AS b_comb, - (items.b0 - items.b180) / 2 / %s * 24 AS b_increment, - SUM((items.b0 - items.b180) / 2 / %s * 24) OVER (ORDER BY depth desc) AS b_cum_dev - from inclinometer_measurement, jsonb_to_recordset(inclinometer_measurement.values) as items(depth int, a0 real, a180 real, b0 real, b180 real) - `, 
inclinometerConstant, inclinometerConstant, inclinometerConstant, inclinometerConstant) - } -} - -func (q *Queries) ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*InclinometerMeasurementValues, error) { - constant := fmt.Sprintf("%.0f", inclConstant) - v := []*InclinometerMeasurementValues{} - if err := q.db.SelectContext(ctx, &v, listInclinometerMeasurementsValues(constant)+" WHERE timeseries_id = $1 AND time = $2 ORDER BY depth", timeseriesID, time); err != nil { - return nil, err - } - return v, nil -} - -const deleteInclinometerMeasurement = ` - DELETE FROM inclinometer_measurement WHERE timeseries_id = $1 and time = $2 -` - -// DeleteInclinometerMeasurements deletes a inclinometer Measurement -func (q *Queries) DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error { - _, err := q.db.ExecContext(ctx, deleteInclinometerMeasurement, timeseriesID, time) - return err -} - -const createOrUpdateInclinometerMeasurement = ` - INSERT INTO inclinometer_measurement (timeseries_id, time, values, creator, create_date) VALUES ($1, $2, $3, $4, $5) - ON CONFLICT ON CONSTRAINT inclinometer_unique_time DO UPDATE SET values = EXCLUDED.values; -` - -// CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer -// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated -func (q *Queries) CreateOrUpdateInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, values types.JSONText, profileID uuid.UUID, createDate time.Time) error { - _, err := q.db.ExecContext(ctx, createOrUpdateInclinometerMeasurement, timeseriesID, t, values, profileID, createDate) - return err -} - -const listInstrumentIDsFromTimeseriesID = ` - SELECT instrument_id FROM v_timeseries_stored WHERE id= $1 -` - -func (q *Queries) ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID 
uuid.UUID) ([]uuid.UUID, error) { - instrumentIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, &instrumentIDs, listInstrumentIDsFromTimeseriesID, timeseriesID); err != nil { - return nil, err - } - return instrumentIDs, nil -} - -const listParameterIDsFromParameterName = ` - SELECT id FROM parameter WHERE name = $1 -` - -func (q *Queries) ListParameterIDsFromParameterName(ctx context.Context, parameterName string) ([]uuid.UUID, error) { - parameterIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, ¶meterIDs, listParameterIDsFromParameterName, parameterName); err != nil { - return nil, err - } - return parameterIDs, nil -} - -const listUnitIDsFromUnitName = ` - SELECT id FROM unit WHERE name = $1 -` - -func (q *Queries) ListUnitIDsFromUnitName(ctx context.Context, unitName string) ([]uuid.UUID, error) { - unitIDs := make([]uuid.UUID, 0) - if err := q.db.SelectContext(ctx, &unitIDs, listUnitIDsFromUnitName, unitName); err != nil { - return nil, err - } - return unitIDs, nil -} diff --git a/api/internal/model/opendcs.go b/api/internal/model/opendcs.go deleted file mode 100644 index 2f0366ab..00000000 --- a/api/internal/model/opendcs.go +++ /dev/null @@ -1,48 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// Site is an instrument, represented as an OpenDCS Site -type Site struct { - Elevation string `xml:"Elevation"` - ElevationUnits string `xml:"ElevationUnits"` - Description string `xml:"Description"` - SiteName SiteName `xml:"SiteName"` -} - -// SiteName is SiteName -type SiteName struct { - ID uuid.UUID `xml:",chardata"` - NameType string `xml:",attr"` -} - -// AsSite returns an instrument represented as an OpenDCS Site -func (n *Instrument) AsSite() Site { - return Site{ - Elevation: "", - ElevationUnits: "", - Description: n.Name, - SiteName: SiteName{ - ID: n.ID, - NameType: "uuid", - }, - } -} - -// ListOpendcsSites returns an array of instruments from the database -// And formats them as OpenDCS 
Sites -func (q *Queries) ListOpendcsSites(ctx context.Context) ([]Site, error) { - nn, err := q.ListInstruments(ctx) - if err != nil { - return make([]Site, 0), err - } - ss := make([]Site, len(nn)) - for idx := range nn { - ss[idx] = nn[idx].AsSite() - } - return ss, nil -} diff --git a/api/internal/model/plot_config.go b/api/internal/model/plot_config.go deleted file mode 100644 index 695b5d94..00000000 --- a/api/internal/model/plot_config.go +++ /dev/null @@ -1,167 +0,0 @@ -package model - -import ( - "context" - "fmt" - "strings" - "time" - - "github.com/google/uuid" -) - -const ( - ScatterLinePlotType = "scatter-line" - ProfilePlotType = "profile" - ContourPlotType = "contour" - BullseyePlotType = "bullseye" -) - -type PlotConfig struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Slug string `json:"slug"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ReportConfigs dbJSONSlice[IDSlugName] `json:"report_configs" db:"report_configs"` - PlotType string `json:"plot_type" db:"plot_type"` - Display Opts `json:"display" db:"display"` - PlotConfigSettings - AuditInfo -} - -// PlotConfigSettings describes options for displaying the plot consistently. -// Specifically, whether to ignore data entries in a timeseries that have been masked, -// or whether to display user comments. -type PlotConfigSettings struct { - ShowMasked bool `json:"show_masked" db:"show_masked"` - ShowNonValidated bool `json:"show_nonvalidated" db:"show_nonvalidated"` - ShowComments bool `json:"show_comments" db:"show_comments"` - AutoRange bool `json:"auto_range" db:"auto_range"` - DateRange string `json:"date_range" db:"date_range"` - Threshold int `json:"threshold" db:"threshold"` -} - -// DateRangeTimeWindow creates a TimeWindow from a date range string. -// -// Acceptable date range strings are "lifetime", "5 years", "1 year", or a fixed date in the -// format "YYYY-MM-DD YYYY-MM-DD" with after and before dates separated by a single whitespace. 
-func (pc *PlotConfig) DateRangeTimeWindow() (TimeWindow, error) { - switch dr := strings.ToLower(pc.DateRange); dr { - case "lifetime": - return TimeWindow{After: time.Time{}, Before: time.Now()}, nil - case "5 years": - return TimeWindow{After: time.Now().AddDate(-5, 0, 0), Before: time.Now()}, nil - case "1 year": - return TimeWindow{After: time.Now().AddDate(-1, 0, 0), Before: time.Now()}, nil - case "1 month": - return TimeWindow{After: time.Now().AddDate(0, -1, 0), Before: time.Now()}, nil - default: - cdr := strings.Split(dr, " ") - invalidDateErr := fmt.Errorf("invalid date range; custom date range must be in format \"YYYY-MM-DD YYYY-MM-DD\"") - if len(cdr) != 2 { - return TimeWindow{}, invalidDateErr - } - after, err := time.Parse("2006-01-02", cdr[0]) - if err != nil { - return TimeWindow{}, invalidDateErr - } - before, err := time.Parse("2006-01-02", cdr[1]) - if err != nil { - return TimeWindow{}, invalidDateErr - } - return TimeWindow{After: after, Before: before}, nil - } -} - -const listPlotConfigsSQL = ` - SELECT - id, - slug, - name, - project_id, - report_configs, - creator, - create_date, - updater, - update_date, - show_masked, - show_nonvalidated, - show_comments, - auto_range, - date_range, - threshold, - plot_type, - display - FROM v_plot_configuration -` - -// PlotConfig -const listPlotConfigs = listPlotConfigsSQL + ` - WHERE project_id = $1 -` - -func (q *Queries) ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]PlotConfig, error) { - ppc := make([]PlotConfig, 0) - if err := q.db.SelectContext(ctx, &ppc, listPlotConfigs, projectID); err != nil { - return make([]PlotConfig, 0), err - } - return ppc, nil -} - -const getPlotConfig = listPlotConfigsSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetPlotConfig(ctx context.Context, plotConfigID uuid.UUID) (PlotConfig, error) { - var pc PlotConfig - err := q.db.GetContext(ctx, &pc, getPlotConfig, plotConfigID) - return pc, err -} - -const createPlotConfig = ` - INSERT INTO 
plot_configuration (slug, name, project_id, creator, create_date, plot_type) VALUES (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) - RETURNING id -` - -func (q *Queries) CreatePlotConfig(ctx context.Context, pc PlotConfig) (uuid.UUID, error) { - var pcID uuid.UUID - err := q.db.GetContext(ctx, &pcID, createPlotConfig, pc.Name, pc.ProjectID, pc.CreatorID, pc.CreateDate, pc.PlotType) - return pcID, err -} - -// PlotConfigSettings -const createPlotConfigSettings = ` - INSERT INTO plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) - VALUES ($1, $2, $3, $4, $5, $6, $7) -` - -func (q *Queries) CreatePlotConfigSettings(ctx context.Context, pcID uuid.UUID, pc PlotConfigSettings) error { - _, err := q.db.ExecContext(ctx, createPlotConfigSettings, pcID, pc.ShowMasked, pc.ShowNonValidated, pc.ShowComments, pc.AutoRange, pc.DateRange, pc.Threshold) - return err -} - -const updatePlotConfig = ` - UPDATE plot_configuration SET name = $3, updater = $4, update_date = $5 WHERE project_id = $1 AND id = $2 -` - -func (q *Queries) UpdatePlotConfig(ctx context.Context, pc PlotConfig) error { - _, err := q.db.ExecContext(ctx, updatePlotConfig, pc.ProjectID, pc.ID, pc.Name, pc.UpdaterID, pc.UpdateDate) - return err -} - -const deletePlotConfig = ` - DELETE from plot_configuration WHERE project_id = $1 AND id = $2 -` - -func (q *Queries) DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotConfig, projectID, plotConfigID) - return err -} - -const deletePlotConfigSettings = ` - DELETE FROM plot_configuration_settings WHERE id = $1 -` - -func (q *Queries) DeletePlotConfigSettings(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotConfigSettings, plotConfigID) - return err -} diff --git a/api/internal/model/plot_config_bullseye.go b/api/internal/model/plot_config_bullseye.go deleted file mode 100644 
index c2ea9da6..00000000 --- a/api/internal/model/plot_config_bullseye.go +++ /dev/null @@ -1,89 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type PlotConfigBullseyePlot struct { - PlotConfig - Display PlotConfigBullseyePlotDisplay `json:"display" db:"display"` -} - -type PlotConfigBullseyePlotDisplay struct { - XAxisTimeseriesID uuid.UUID `json:"x_axis_timeseries_id" db:"x_axis_timeseries_id"` - YAxisTimeseriesID uuid.UUID `json:"y_axis_timeseries_id" db:"y_axis_timeseries_id"` -} - -func (d *PlotConfigBullseyePlotDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigMeasurementBullseyePlot struct { - Time time.Time `json:"time" db:"time"` - X *float64 `json:"x" db:"x"` - Y *float64 `json:"y" db:"y"` -} - -const createPlotBullseyeConfig = ` - INSERT INTO plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) VALUES ($1, $2, $3) -` - -func (q *Queries) CreatePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigBullseyePlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotBullseyeConfig, plotConfigID, cfg.XAxisTimeseriesID, cfg.YAxisTimeseriesID) - return err -} - -const updatePlotBullseyeConfig = ` - UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigBullseyePlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotBullseyeConfig, plotConfigID, cfg.XAxisTimeseriesID, cfg.YAxisTimeseriesID) - return err -} - -const deletePlotBullseyeConfig = ` - DELETE FROM plot_bullseye_config WHERE plog_config_id = $1 -` - -func (q *Queries) DeletePlotBullseyeConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, 
deletePlotBullseyeConfig, plotConfigID) - return err -} - -const listPlotConfigMeasurementsBullseyePlot = ` - SELECT - t.time, - locf(xm.value) AS x, - locf(ym.value) AS y - FROM plot_bullseye_config pc - INNER JOIN timeseries_measurement t - ON t.timeseries_id = pc.x_axis_timeseries_id - OR t.timeseries_id = pc.y_axis_timeseries_id - LEFT JOIN timeseries_measurement xm - ON xm.timeseries_id = pc.x_axis_timeseries_id - AND xm.time = t.time - LEFT JOIN timeseries_measurement ym - ON ym.timeseries_id = pc.y_axis_timeseries_id - AND ym.time = t.time - WHERE pc.plot_config_id = $1 - AND t.time > $2 - AND t.time < $3 - GROUP BY t.time - ORDER BY t.time ASC -` - -func (q *Queries) ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw TimeWindow) ([]PlotConfigMeasurementBullseyePlot, error) { - pcmm := make([]PlotConfigMeasurementBullseyePlot, 0) - err := q.db.SelectContext(ctx, &pcmm, listPlotConfigMeasurementsBullseyePlot, plotConfigID, tw.After, tw.Before) - return pcmm, err -} diff --git a/api/internal/model/plot_config_contour.go b/api/internal/model/plot_config_contour.go deleted file mode 100644 index a4e4bdb2..00000000 --- a/api/internal/model/plot_config_contour.go +++ /dev/null @@ -1,136 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type PlotConfigContourPlot struct { - PlotConfig - Display PlotConfigContourPlotDisplay `json:"display" db:"display"` -} - -type PlotConfigContourPlotDisplay struct { - TimeseriesIDs dbSlice[uuid.UUID] `json:"timeseries_ids" db:"timeseries_ids"` - Time *time.Time `json:"time" db:"time"` - LocfBackfill string `json:"locf_backfill" db:"locf_backfill"` - GradientSmoothing bool `json:"gradient_smoothing" db:"gradient_smoothing"` - ContourSmoothing bool `json:"contour_smoothing" db:"contour_smoothing"` - ShowLabels bool `json:"show_labels" db:"show_labels"` -} - -func (d *PlotConfigContourPlotDisplay) Scan(src interface{}) error { - b, 
ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigMeasurementContourPlot struct { - X float64 `json:"x" db:"x"` - Y float64 `json:"y" db:"y"` - Z *float64 `json:"z" db:"z"` -} - -type AggregatePlotConfigMeasurementsContourPlot struct { - X []float64 `json:"x" db:"x"` - Y []float64 `json:"y" db:"y"` - Z []*float64 `json:"z" db:"z"` -} - -const createPlotContourConfig = ` - INSERT INTO plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) - VALUES ($1, $2, $3, $4, $5, $6) -` - -func (q *Queries) CreatePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigContourPlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotContourConfig, plotConfigID, cfg.Time, cfg.LocfBackfill, cfg.GradientSmoothing, cfg.ContourSmoothing, cfg.ShowLabels) - return err -} - -const updatePlotContourConfig = ` - UPDATE plot_contour_config SET "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 - WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID, cfg PlotConfigContourPlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotContourConfig, plotConfigID, cfg.Time, cfg.LocfBackfill, cfg.GradientSmoothing, cfg.ContourSmoothing, cfg.ShowLabels) - return err -} - -const deletePlotContourConfig = ` - DELETE FROM plot_contour_config WHERE plog_config_id = $1 -` - -func (q *Queries) DeletePlotContourConfig(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deletePlotContourConfig, plotConfigID) - return err -} - -const createPlotContourConfigTimeseries = ` - INSERT INTO plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) VALUES ($1, $2) - ON CONFLICT (plot_contour_config_id, timeseries_id) DO NOTHING -` - -func (q *Queries) CreatePlotContourConfigTimeseries(ctx 
context.Context, plotConfigID, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, createPlotContourConfigTimeseries, plotConfigID, timeseriesID) - return err -} - -const deleteAllPlotContourConfigTimeseries = ` - DELETE FROM plot_contour_config_timeseries WHERE plot_contour_config_id = $1 -` - -func (q *Queries) DeleteAllPlotContourConfigTimeseries(ctx context.Context, plotConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotContourConfigTimeseries, plotConfigID) - return err -} - -const listPlotContourConfigTimes = ` - SELECT DISTINCT mm.time - FROM plot_contour_config_timeseries pcts - INNER JOIN timeseries_measurement mm ON mm.timeseries_id = pcts.timeseries_id - WHERE pcts.plot_contour_config_id = $1 - AND mm.time > $2 - AND mm.time < $3 - ORDER BY time ASC -` - -func (q *Queries) ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw TimeWindow) ([]time.Time, error) { - tt := make([]time.Time, 0) - err := q.db.SelectContext(ctx, &tt, listPlotContourConfigTimes, plotConfigID, tw.After, tw.Before) - return tt, err -} - -// NOTE: this assumes all geometries are stored natively as WGS84 (EPSG:4326) -const listPlotConfigMeasurementsContourPlot = ` - SELECT - oi.x, - oi.y, - locf(mm.value) AS z - FROM plot_contour_config pc - LEFT JOIN plot_contour_config_timeseries pcts ON pcts.plot_contour_config_id = pc.plot_config_id - LEFT JOIN timeseries_measurement mm ON mm.timeseries_id = pcts.timeseries_id - INNER JOIN timeseries ts ON ts.id = pcts.timeseries_id - INNER JOIN ( - SELECT - ii.id, - ST_X(ST_Centroid(ii.geometry)) AS x, - ST_Y(ST_Centroid(ii.geometry)) AS y - FROM instrument ii - ) oi ON oi.id = ts.instrument_id - WHERE plot_config_id = $1 - AND mm.time = $2 - GROUP BY pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y -` - -func (q *Queries) ListPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) ([]PlotConfigMeasurementContourPlot, error) { - pcmm := 
make([]PlotConfigMeasurementContourPlot, 0) - err := q.db.SelectContext(ctx, &pcmm, listPlotConfigMeasurementsContourPlot, plotConfigID, t) - return pcmm, err -} diff --git a/api/internal/model/plot_config_profile.go b/api/internal/model/plot_config_profile.go deleted file mode 100644 index 848d66e3..00000000 --- a/api/internal/model/plot_config_profile.go +++ /dev/null @@ -1,45 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - - "github.com/google/uuid" -) - -type PlotConfigProfilePlot struct { - PlotConfig - Display PlotConfigProfilePlotDisplay `json:"display" db:"display"` -} - -type PlotConfigProfilePlotDisplay struct { - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentType string `json:"instrument_type,omitempty" db:"instrument_type"` -} - -func (d *PlotConfigProfilePlotDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -const createPlotProfileConfig = ` - INSERT INTO plot_profile_config (plot_config_id, instrument_id) VALUES ($1, $2) -` - -func (q *Queries) CreatePlotProfileConfig(ctx context.Context, plotConfigID uuid.UUID, d PlotConfigProfilePlotDisplay) error { - _, err := q.db.ExecContext(ctx, createPlotProfileConfig, plotConfigID, d.InstrumentID) - return err -} - -const updatePlotProfileConfig = ` - UPDATE plot_profile_config SET instrument_id=$2 WHERE plot_config_id=$1 -` - -func (q *Queries) UpdatePlotProfileConfig(ctx context.Context, plotConfigID uuid.UUID, d PlotConfigProfilePlotDisplay) error { - _, err := q.db.ExecContext(ctx, updatePlotProfileConfig, plotConfigID, d.InstrumentID) - return err -} diff --git a/api/internal/model/plot_config_scatter_line.go b/api/internal/model/plot_config_scatter_line.go deleted file mode 100644 index a28faebe..00000000 --- a/api/internal/model/plot_config_scatter_line.go +++ /dev/null @@ -1,144 +0,0 @@ -package model - -import ( - "context" - 
"encoding/json" - "fmt" - - "github.com/google/uuid" -) - -type PlotConfigScatterLinePlot struct { - PlotConfig - Display PlotConfigScatterLineDisplay `json:"display" db:"display"` - // TODO AlertConfigIDs []string -} - -type PlotConfigScatterLineDisplay struct { - Traces []PlotConfigScatterLineTimeseriesTrace `json:"traces"` - Layout PlotConfigScatterLineLayout `json:"layout"` -} - -func (d *PlotConfigScatterLineDisplay) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), d) -} - -type PlotConfigScatterLineTimeseriesTrace struct { - PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` - TimeseriesID uuid.UUID `json:"timeseries_id"` - Name string `json:"name"` // read-only - Parameter string `json:"parameter"` // read-only - TraceOrder int `json:"trace_order"` - TraceType string `json:"trace_type"` - Color string `json:"color"` - LineStyle string `json:"line_style"` - Width float32 `json:"width"` - ShowMarkers bool `json:"show_markers"` - YAxis string `json:"y_axis"` // y1 or y2, default y1 -} - -type PlotConfigScatterLineLayout struct { - CustomShapes []PlotConfigScatterLineCustomShape `json:"custom_shapes"` - YAxisTitle *string `json:"y_axis_title"` - Y2AxisTitle *string `json:"y2_axis_title"` -} - -type PlotConfigScatterLineCustomShape struct { - PlotConfigurationID uuid.UUID `json:"plot_configuration_id"` - Enabled bool `json:"enabled"` - Name string `json:"name"` - DataPoint float32 `json:"data_point"` - Color string `json:"color"` -} - -const createPlotConfigScatterLineLayout = `INSERT INTO plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) VALUES ($1, $2, $3)` - -func (q *Queries) CreatePlotConfigScatterLineLayout(ctx context.Context, pcID uuid.UUID, layout PlotConfigScatterLineLayout) error { - _, err := q.db.ExecContext(ctx, createPlotConfigScatterLineLayout, pcID, layout.YAxisTitle, layout.Y2AxisTitle) - return err -} - -const 
updatePlotConfigScatterLineLayout = `UPDATE plot_scatter_line_config SET y_axis_title=$2, y2_axis_title=$3 WHERE plot_config_id=$1` - -func (q *Queries) UpdatePlotConfigScatterLineLayout(ctx context.Context, pcID uuid.UUID, layout PlotConfigScatterLineLayout) error { - _, err := q.db.ExecContext(ctx, updatePlotConfigScatterLineLayout, pcID, layout.YAxisTitle, layout.Y2AxisTitle) - return err -} - -// PlotConfigTimeseriesTrace -const createPlotConfigTimeseriesTrace = ` - INSERT INTO plot_configuration_timeseries_trace - (plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8) -` - -func (q *Queries) CreatePlotConfigTimeseriesTrace(ctx context.Context, tr PlotConfigScatterLineTimeseriesTrace) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigTimeseriesTrace, - tr.PlotConfigurationID, tr.TimeseriesID, tr.TraceOrder, tr.Color, tr.LineStyle, tr.Width, tr.ShowMarkers, tr.YAxis, - ) - return err -} - -const updatePlotConfigTimeseriesTrace = ` - UPDATE plot_configuration_timeseries_trace - SET trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 - WHERE plot_configuration_id=$1 AND timeseries_id=$2 -` - -func (q *Queries) UpdatePlotConfigTimeseriesTrace(ctx context.Context, tr PlotConfigScatterLineTimeseriesTrace) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigTimeseriesTrace, - tr.PlotConfigurationID, tr.TimeseriesID, tr.TraceOrder, tr.Color, tr.LineStyle, tr.Width, tr.ShowMarkers, tr.YAxis, - ) - return err -} - -const deleteAllPlotConfigTimeseriesTraces = ` - DELETE FROM plot_configuration_timeseries_trace WHERE plot_configuration_id=$1 -` - -func (q *Queries) DeleteAllPlotConfigTimeseriesTraces(ctx context.Context, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotConfigTimeseriesTraces, pcID) - return err -} - -// PlotConfigCustomShape -const createPlotConfigCustomShape = ` - INSERT INTO 
plot_configuration_custom_shape - (plot_configuration_id, enabled, name, data_point, color) VALUES ($1, $2, $3, $4, $5) -` - -func (q *Queries) CreatePlotConfigCustomShape(ctx context.Context, cs PlotConfigScatterLineCustomShape) error { - _, err := q.db.ExecContext( - ctx, createPlotConfigCustomShape, - cs.PlotConfigurationID, cs.Enabled, cs.Name, cs.DataPoint, cs.Color, - ) - return err -} - -const updatePlotConfigCustomShape = ` - UPDATE plot_configuration_custom_shape - SET enabled=$2, name=$3, data_point=$4, color=$5 WHERE plot_configuration_id=$1 -` - -func (q *Queries) UpdatePlotConfigCustomShape(ctx context.Context, cs PlotConfigScatterLineCustomShape) error { - _, err := q.db.ExecContext( - ctx, updatePlotConfigCustomShape, - cs.PlotConfigurationID, cs.Enabled, cs.Name, cs.DataPoint, cs.Color, - ) - return err -} - -const deleteAllPlotConfigCustomShapes = ` - DELETE FROM plot_configuration_custom_shape WHERE plot_configuration_id=$1 -` - -func (q *Queries) DeleteAllPlotConfigCustomShapes(ctx context.Context, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteAllPlotConfigCustomShapes, pcID) - return err -} diff --git a/api/internal/model/profile.go b/api/internal/model/profile.go deleted file mode 100644 index 9b075bb4..00000000 --- a/api/internal/model/profile.go +++ /dev/null @@ -1,188 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/password" - "github.com/google/uuid" -) - -// Profile is a user profile -type Profile struct { - ID uuid.UUID `json:"id" db:"id"` - Tokens []TokenInfoProfile `json:"tokens"` - IsAdmin bool `json:"is_admin" db:"is_admin"` - Roles dbSlice[string] `json:"roles" db:"roles"` - ProfileInfo -} - -// TokenInfoProfile is token information embedded in Profile -type TokenInfoProfile struct { - TokenID string `json:"token_id" db:"token_id"` - Issued time.Time `json:"issued"` -} - -// ProfileInfo is information necessary to construct a profile -type ProfileInfo 
struct { - EDIPI int `json:"-" db:"edipi"` - Username string `json:"username" db:"username"` - DisplayName string `json:"display_name" db:"display_name"` - Email string `json:"email" db:"email"` -} - -// TokenInfo represents the information held in the database about a token -type TokenInfo struct { - ID uuid.UUID `json:"-"` - TokenID string `json:"token_id" db:"token_id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - Issued time.Time `json:"issued"` - Hash string `json:"-"` -} - -// Token includes all TokenInfo and the actual token string generated for a user -// this is only returned the first time a token is generated -type Token struct { - SecretToken string `json:"secret_token"` - TokenInfo -} - -type ProfileClaims struct { - PreferredUsername string - Name string - Email string - SubjectDN *string - CacUID *int - X509Presented bool -} - -const getProfileForEDIPI = ` - SELECT * FROM v_profile WHERE edipi = $1 -` - -func (q *Queries) GetProfileForEDIPI(ctx context.Context, edipi int) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForEDIPI, edipi) - return p, err -} - -const getProfileForEmail = ` - SELECT * FROM v_profile WHERE email ILIKE $1 -` - -func (q *Queries) GetProfileForEmail(ctx context.Context, email string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForEmail, email) - return p, err -} - -const getProfileForUsername = ` - SELECT * FROM v_profile WHERE username = $1 -` - -func (q *Queries) GetProfileForUsername(ctx context.Context, username string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, &p, getProfileForUsername, username) - return p, err -} - -const getIssuedTokens = ` - SELECT token_id, issued FROM profile_token WHERE profile_id = $1 -` - -func (q *Queries) GetIssuedTokens(ctx context.Context, profileID uuid.UUID) ([]TokenInfoProfile, error) { - tokens := make([]TokenInfoProfile, 0) - err := q.db.SelectContext(ctx, &tokens, getIssuedTokens, 
profileID) - return tokens, err -} - -const getProfileForTokenID = ` - SELECT p.id, p.edipi, p.username, p.email, p.is_admin - FROM profile_token t - LEFT JOIN v_profile p ON p.id = t.profile_id - WHERE t.token_id = $1 -` - -func (q *Queries) GetProfileForTokenID(ctx context.Context, tokenID string) (Profile, error) { - var p Profile - err := q.db.GetContext(ctx, getProfileForTokenID, tokenID) - return p, err -} - -const createProfile = ` - INSERT INTO profile (edipi, username, email, display_name) VALUES ($1, $2, $3, $4) RETURNING id, username, email, display_name -` - -// CreateProfile creates a new profile -func (q *Queries) CreateProfile(ctx context.Context, n ProfileInfo) (Profile, error) { - p := Profile{ - Tokens: make([]TokenInfoProfile, 0), - Roles: make([]string, 0), - } - err := q.db.GetContext(ctx, &p, createProfile, n.EDIPI, n.Username, n.Email, n.DisplayName) - return p, err -} - -const createProfileToken = ` - INSERT INTO profile_token (token_id, profile_id, hash) VALUES ($1,$2,$3) RETURNING * -` - -// CreateProfileToken creates a secret token and stores the HASH (not the actual token) -// to the database. The return payload of this function is the first and last time you'll see -// the raw token unless the user writes it down or stores it somewhere safe. 
-func (q *Queries) CreateProfileToken(ctx context.Context, profileID uuid.UUID) (Token, error) { - var t Token - secretToken := password.GenerateRandom(40) - tokenID := password.GenerateRandom(40) - hash, err := password.CreateHash(secretToken, password.DefaultParams) - if err != nil { - return t, err - } - if err := q.db.GetContext(ctx, &t, createProfileToken, tokenID, profileID, hash); err != nil { - return t, err - } - t.SecretToken = secretToken - return t, nil -} - -const getTokenInfoByTokenID = ` - SELECT id, token_id, profile_id, issued, hash FROM profile_token WHERE token_id=$1 LIMIT 1 -` - -// GetTokenInfoByTokenID returns a single token by token id -func (q *Queries) GetTokenInfoByTokenID(ctx context.Context, tokenID string) (TokenInfo, error) { - var n TokenInfo - err := q.db.GetContext(ctx, &n, getTokenInfoByTokenID, tokenID) - return n, err -} - -const updateProfileForEDIPI = `UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4` - -func (q *Queries) UpdateProfileForEDIPI(ctx context.Context, edipi int, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEDIPI, pi.Username, pi.Email, pi.DisplayName, edipi) - return err -} - -const updateProfileForEmail = `UPDATE profile SET username=$1, display_name=$2 WHERE email ILIKE $3` - -func (q *Queries) UpdateProfileForEmail(ctx context.Context, email string, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEmail, pi.Username, pi.DisplayName, email) - return err -} - -const updateProfileForUsername = `UPDATE profile SET email=$1, display_name=$2 WHERE username=$3` - -func (q *Queries) UpdateProfileForUsername(ctx context.Context, username string, pi ProfileInfo) error { - _, err := q.db.ExecContext(ctx, updateProfileForEmail, pi.Email, pi.DisplayName, username) - return err -} - -const deleteToken = ` - DELETE FROM profile_token WHERE profile_id=$1 AND token_id=$2 -` - -// DeleteToken deletes a token by token_id -func (q *Queries) 
DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error { - _, err := q.db.ExecContext(ctx, deleteToken, profileID, tokenID) - return err -} diff --git a/api/internal/model/project.go b/api/internal/model/project.go deleted file mode 100644 index c5c42b89..00000000 --- a/api/internal/model/project.go +++ /dev/null @@ -1,228 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -type District struct { - Agency string `json:"agency" db:"agency"` - ID uuid.UUID `json:"id" db:"id"` - Name string `json:"name" db:"name"` - Initials string `json:"initials" db:"initials"` - DivisionName string `json:"division_name" db:"division_name"` - DivisionInitials string `json:"division_initials" db:"division_initials"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` -} - -type Project struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - FederalID *string `json:"federal_id" db:"federal_id"` - DistrictID *uuid.UUID `json:"district_id" db:"district_id"` - OfficeID *uuid.UUID `json:"office_id" db:"office_id"` - Image *string `json:"image" db:"image"` - Deleted bool `json:"-"` - InstrumentCount int `json:"instrument_count" db:"instrument_count"` - InstrumentGroupCount int `json:"instrument_group_count" db:"instrument_group_count"` - AuditInfo -} - -type ProjectCount struct { - ProjectCount int `json:"project_count"` -} - -type ProjectCollection []Project - -const selectProjectsSQL = ` - SELECT - id, federal_id, image, office_id, district_id, deleted, slug, name, creator, creator_username, create_date, - updater, updater_username, update_date, instrument_count, instrument_group_count - FROM v_project -` - -const projectSearch = selectProjectsSQL + ` - WHERE NOT deleted AND name ILIKE '%' || $1 || '%' LIMIT $2 ORDER BY name -` - -// SearchProjects returns search result for projects -func (q *Queries) SearchProjects(ctx context.Context, searchInput string, limit int) ([]SearchResult, error) { - 
ss := make([]SearchResult, 0) - if err := q.db.SelectContext(ctx, &ss, projectSearch, searchInput, limit); err != nil { - return nil, err - } - rr := make([]SearchResult, len(ss)) - for idx, p := range ss { - rr[idx] = SearchResult{ID: p.ID, Type: "project", Item: p} - } - return rr, nil -} - -const listDistricts = ` - SELECT * FROM v_district -` - -func (q *Queries) ListDistricts(ctx context.Context) ([]District, error) { - dd := make([]District, 0) - if err := q.db.SelectContext(ctx, &dd, listDistricts); err != nil { - return nil, err - } - return dd, nil -} - -const listProjects = selectProjectsSQL + ` - WHERE NOT deleted ORDER BY name -` - -// ListProjects returns a slice of projects -func (q *Queries) ListProjects(ctx context.Context) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjects); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsByFederalID = selectProjectsSQL + ` - WHERE federal_id IS NOT NULL AND federal_id = $1 AND NOT deleted ORDER BY name -` - -// ListProjects returns a slice of projects -func (q *Queries) ListProjectsByFederalID(ctx context.Context, federalID string) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjectsByFederalID, federalID); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsForProfile = selectProjectsSQL + ` - WHERE id = ANY( - SELECT project_id FROM profile_project_roles - WHERE profile_id = $1 - ) - AND NOT deleted - ORDER BY name -` - -func (q *Queries) ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]Project, error) { - pp := make([]Project, 0) - if err := q.db.SelectContext(ctx, &pp, listProjectsForProfile, profileID); err != nil { - return nil, err - } - return pp, nil -} - -const listProjectsForProfileRole = selectProjectsSQL + ` - WHERE id = ANY( - SELECT project_id FROM profile_project_roles pr - INNER JOIN role r ON r.id = pr.role_id - WHERE 
pr.profile_id = $1 - AND r.name = $2 - ) - AND NOT deleted - ORDER BY name -` - -func (q *Queries) ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]Project, error) { - pp := make([]Project, 0) - err := q.db.SelectContext(ctx, &pp, listProjectsForProfileRole, profileID, role) - return pp, err -} - -const listProjectInstruments = listInstrumentsSQL + ` - WHERE id = ANY( - SELECT instrument_id - FROM project_instrument - WHERE project_id = $1 - ) - AND NOT deleted -` - -// ListProjectInstruments returns a slice of instruments for a project -func (q *Queries) ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]Instrument, error) { - ii := make([]Instrument, 0) - if err := q.db.SelectContext(ctx, &ii, listProjectInstruments, projectID); err != nil { - return nil, err - } - return ii, nil -} - -const listProjectInstrumentGroups = listInstrumentGroupsSQL + ` - WHERE project_id = $1 AND NOT deleted -` - -// ListProjectInstrumentGroups returns a list of instrument groups for a project -func (q *Queries) ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]InstrumentGroup, error) { - gg := make([]InstrumentGroup, 0) - if err := q.db.SelectContext(ctx, &gg, listProjectInstrumentGroups, projectID); err != nil { - return nil, err - } - return gg, nil -} - -const getProjectCount = ` - SELECT COUNT(id) FROM project WHERE NOT deleted -` - -// GetProjectCount returns the number of projects in the database that are not deleted -func (q *Queries) GetProjectCount(ctx context.Context) (ProjectCount, error) { - var pc ProjectCount - if err := q.db.GetContext(ctx, &pc.ProjectCount, getProjectCount); err != nil { - return pc, err - } - return pc, nil -} - -const getProject = selectProjectsSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetProject(ctx context.Context, id uuid.UUID) (Project, error) { - var p Project - err := q.db.GetContext(ctx, &p, getProject, id) - return p, err -} - -const createProject = ` - INSERT 
INTO project (federal_id, slug, name, district_id, creator, create_date) - VALUES ($1, slugify($2, 'project'), $2, $3, $4, $5) - RETURNING id, slug -` - -func (q *Queries) CreateProject(ctx context.Context, p Project) (IDSlugName, error) { - var aa IDSlugName - err := q.db.GetContext(ctx, &aa, createProject, p.FederalID, p.Name, p.DistrictID, p.CreatorID, p.CreateDate) - return aa, err -} - -const updateProject = ` - UPDATE project SET name=$2, updater=$3, update_date=$4, district_id=$5, federal_id=$6 WHERE id=$1 RETURNING id -` - -// UpdateProject updates a project -func (q *Queries) UpdateProject(ctx context.Context, p Project) error { - _, err := q.db.ExecContext(ctx, updateProject, p.ID, p.Name, p.UpdaterID, p.UpdateDate, p.DistrictID, p.FederalID) - return err -} - -const updateProjectImage = ` - UPDATE project SET image = $1 WHERE project_id = $2 -` - -func (q *Queries) UpdateProjectImage(ctx context.Context, fileName string, projectID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, updateProjectImage, fileName, projectID) - return err -} - -const deleteFlagProject = ` - UPDATE project SET deleted=true WHERE id = $1 -` - -// DeleteFlagProject sets deleted to true for a project -func (q *Queries) DeleteFlagProject(ctx context.Context, id uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteFlagProject, id) - return err -} diff --git a/api/internal/model/project_role.go b/api/internal/model/project_role.go deleted file mode 100644 index 8a5b38a6..00000000 --- a/api/internal/model/project_role.go +++ /dev/null @@ -1,101 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -// ProjectMembership holds -type ProjectMembership struct { - ID uuid.UUID `json:"id" db:"id"` - ProfileID uuid.UUID `json:"profile_id" db:"profile_id"` - Username *string `json:"username"` - Email string `json:"email"` - RoleID uuid.UUID `json:"role_id" db:"role_id"` - Role string `json:"role"` -} - -const listProjectMembers = ` - SELECT id, profile_id, 
username, email, role_id, role - FROM v_profile_project_roles - WHERE project_id = $1 - ORDER BY email -` - -// ListProjectMembers lists users (profiles) who have permissions on a project and their role info -func (q *Queries) ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]ProjectMembership, error) { - rr := make([]ProjectMembership, 0) - if err := q.db.SelectContext(ctx, &rr, listProjectMembers, projectID); err != nil { - return nil, err - } - return rr, nil -} - -const getProjectMembership = ` - SELECT id, profile_id, username, email, role_id, role - FROM v_profile_project_roles - WHERE id = $1 -` - -func (q *Queries) GetProjectMembership(ctx context.Context, roleID uuid.UUID) (ProjectMembership, error) { - var pm ProjectMembership - err := q.db.GetContext(ctx, &pm, getProjectMembership, roleID) - return pm, err -} - -const addProjectMemberRole = ` - INSERT INTO profile_project_roles (project_id, profile_id, role_id, granted_by) - VALUES ($1, $2, $3, $4) - ON CONFLICT ON CONSTRAINT unique_profile_project_role DO UPDATE SET project_id = EXCLUDED.project_id - RETURNING id -` - -// AddProjectMemberRole adds a role to a user for a specific project -func (q *Queries) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (uuid.UUID, error) { - var roleIDNew uuid.UUID - err := q.db.GetContext(ctx, &roleIDNew, addProjectMemberRole, projectID, profileID, roleID, grantedBy) - return roleIDNew, err -} - -const removeProjectMemberRole = ` - DELETE FROM profile_project_roles WHERE project_id = $1 AND profile_id = $2 AND role_id = $3 -` - -// RemoveProjectMemberRole removes a role from a user for a specific project -func (q *Queries) RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, removeProjectMemberRole, projectID, profileID, roleID) - return err -} - -const isProjectAdmin = ` - SELECT EXISTS ( - SELECT 1 FROM profile_project_roles pr - INNER 
JOIN role r ON r.id = pr.role_id - WHERE pr.profile_id = $1 - AND pr.project_id = $2 - AND r.name = 'ADMIN' - ) -` - -func (q *Queries) IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) { - var isAdmin bool - err := q.db.GetContext(ctx, &isAdmin, isProjectAdmin, projectID) - return isAdmin, err -} - -const isProjectMember = ` - SELECT EXISTS ( - SELECT 1 FROM profile_project_roles pr - INNER JOIN role r ON r.id = pr.role_id - WHERE pr.profile_id = $1 - AND pr.project_id = $2 - AND (r.name = 'MEMBER' OR r.name = 'ADMIN') - ) -` - -func (q *Queries) IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) { - var isMember bool - err := q.db.GetContext(ctx, &isMember, isProjectMember, projectID) - return isMember, err -} diff --git a/api/internal/model/report_config.go b/api/internal/model/report_config.go deleted file mode 100644 index af5e38c5..00000000 --- a/api/internal/model/report_config.go +++ /dev/null @@ -1,206 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - "fmt" - "time" - - "github.com/google/uuid" -) - -type ReportConfig struct { - ID uuid.UUID `json:"id" db:"id"` - Slug string `json:"slug" db:"slug"` - Name string `json:"name" db:"name"` - Description string `json:"description" db:"description"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - ProjectName string `json:"project_name" db:"project_name"` - DistrictName *string `json:"district_name" db:"district_name"` - PlotConfigs dbJSONSlice[IDSlugName] `json:"plot_configs" db:"plot_configs"` - GlobalOverrides ReportConfigGlobalOverrides `json:"global_overrides" db:"global_overrides"` - AuditInfo -} - -type ReportConfigGlobalOverrides struct { - DateRange TextOption `json:"date_range" db:"date_range"` - ShowMasked ToggleOption `json:"show_masked" db:"show_masked"` - ShowNonvalidated ToggleOption `json:"show_nonvalidated" db:"show_nonvalidated"` -} - -type TextOption struct { - Enabled bool `json:"enabled" db:"enabled"` 
- Value string `json:"value" db:"value"` -} - -type ToggleOption struct { - Enabled bool `json:"enabled" db:"enabled"` - Value bool `json:"value" db:"value"` -} - -type ReportDownloadJob struct { - ID uuid.UUID `json:"id" db:"id"` - ReportConfigID uuid.UUID `json:"report_config_id" db:"report_config_id"` - Creator uuid.UUID `json:"creator" db:"creator"` - CreateDate time.Time `json:"create_date" db:"create_date"` - Status string `json:"status" db:"status"` - FileKey *string `json:"file_key" db:"file_key"` - FileExpiry *time.Time `json:"file_expiry" db:"file_expiry"` - Progress int `json:"progress" db:"progress"` - ProgressUpdateDate time.Time `json:"progress_update_date" db:"progress_update_date"` -} - -func (o *ReportConfigGlobalOverrides) Scan(src interface{}) error { - b, ok := src.(string) - if !ok { - return fmt.Errorf("type assertion failed") - } - return json.Unmarshal([]byte(b), o) -} - -type ReportConfigWithPlotConfigs struct { - ReportConfig - PlotConfigs []PlotConfigScatterLinePlot `json:"plot_configs"` -} - -type ReportConfigJobMessage struct { - ReportConfigID uuid.UUID `json:"report_config_id"` - JobID uuid.UUID `json:"job_id"` - IsLandscape bool `json:"is_landscape"` -} - -const createReportConfig = ` - INSERT INTO report_config ( - name, slug, project_id, creator, description, date_range, date_range_enabled, - show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled - ) - VALUES ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) - RETURNING id -` - -func (q *Queries) CreateReportConfig(ctx context.Context, rc ReportConfig) (uuid.UUID, error) { - var rcID uuid.UUID - err := q.db.GetContext( - ctx, &rcID, createReportConfig, rc.Name, rc.ProjectID, rc.CreatorID, rc.Description, - rc.GlobalOverrides.DateRange.Value, rc.GlobalOverrides.DateRange.Enabled, - rc.GlobalOverrides.ShowMasked.Value, rc.GlobalOverrides.ShowMasked.Enabled, - rc.GlobalOverrides.ShowNonvalidated.Value, 
rc.GlobalOverrides.ShowNonvalidated.Enabled, - ) - return rcID, err -} - -const listProjectReportConfigs = ` - SELECT * FROM v_report_config WHERE project_id = $1 -` - -func (q *Queries) ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]ReportConfig, error) { - rcs := make([]ReportConfig, 0) - err := q.db.SelectContext(ctx, &rcs, listProjectReportConfigs, projectID) - return rcs, err -} - -const listReportConfigPlotConfigs = ` - SELECT * FROM v_plot_configuration WHERE id = ANY( - SELECT plot_config_id FROM report_config_plot_config WHERE report_config_id = $1 - ) -` - -func (q *Queries) ListReportConfigPlotConfigs(ctx context.Context, rcID uuid.UUID) ([]PlotConfigScatterLinePlot, error) { - pcs := make([]PlotConfigScatterLinePlot, 0) - err := q.db.SelectContext(ctx, &pcs, listReportConfigPlotConfigs, rcID) - return pcs, err -} - -const getReportConfigByID = ` - SELECT * FROM v_report_config WHERE id = $1 -` - -func (q *Queries) GetReportConfigByID(ctx context.Context, rcID uuid.UUID) (ReportConfig, error) { - var rc ReportConfig - err := q.db.GetContext(ctx, &rc, getReportConfigByID, rcID) - return rc, err -} - -const updateReportConfig = ` - UPDATE report_config SET name=$2, - updater=$3, update_date=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, - show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 WHERE id=$1 -` - -func (q *Queries) UpdateReportConfig(ctx context.Context, rc ReportConfig) error { - _, err := q.db.ExecContext( - ctx, updateReportConfig, rc.ID, rc.Name, rc.UpdaterID, rc.UpdateDate, rc.Description, - rc.GlobalOverrides.DateRange.Value, rc.GlobalOverrides.DateRange.Enabled, - rc.GlobalOverrides.ShowMasked.Value, rc.GlobalOverrides.ShowMasked.Enabled, - rc.GlobalOverrides.ShowNonvalidated.Value, rc.GlobalOverrides.ShowNonvalidated.Enabled, - ) - return err -} - -const deleteReportConfig = ` - DELETE FROM report_config WHERE id=$1 -` - -func (q *Queries) DeleteReportConfig(ctx 
context.Context, rcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteReportConfig, rcID) - return err -} - -const assignReportConfigPlotConfig = ` - INSERT INTO report_config_plot_config (report_config_id, plot_config_id) VALUES ($1, $2) -` - -func (q *Queries) AssignReportConfigPlotConfig(ctx context.Context, rcID, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignReportConfigPlotConfig, rcID, pcID) - return err -} - -const unassignReportConfigPlotConfig = ` - DELETE FROM report_config_plot_config WHERE report_config_id=$1 AND plot_config_id=$2 -` - -func (q *Queries) UnassignReportConfigPlotConfig(ctx context.Context, rcID, pcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, assignReportConfigPlotConfig, rcID, pcID) - return err -} - -const unassignAllReportConfigPlotConfig = ` - DELETE FROM report_config_plot_config WHERE report_config_id=$1 -` - -func (q *Queries) UnassignAllReportConfigPlotConfig(ctx context.Context, rcID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, unassignAllReportConfigPlotConfig, rcID) - return err -} - -const getReportDownloadJob = ` - SELECT * FROM report_download_job WHERE id=$1 AND creator=$2 -` - -func (q *Queries) GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (ReportDownloadJob, error) { - var j ReportDownloadJob - err := q.db.GetContext(ctx, &j, getReportDownloadJob, jobID, profileID) - return j, err -} - -const createReportDownloadJob = ` - INSERT INTO report_download_job (report_config_id, creator) VALUES ($1, $2) RETURNING * -` - -func (q *Queries) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID) (ReportDownloadJob, error) { - var jNew ReportDownloadJob - err := q.db.GetContext(ctx, &jNew, createReportDownloadJob, rcID, profileID) - return jNew, err -} - -const updateReportDownloadJob = ` - UPDATE report_download_job SET status=$2, progress=$3, progress_update_date=$4, file_key=$5, file_expiry=$6 WHERE id=$1 -` - -func (q *Queries) 
UpdateReportDownloadJob(ctx context.Context, j ReportDownloadJob) error { - _, err := q.db.ExecContext( - ctx, updateReportDownloadJob, - j.ID, j.Status, j.Progress, j.ProgressUpdateDate, j.FileKey, j.FileExpiry, - ) - return err -} diff --git a/api/internal/model/submittal.go b/api/internal/model/submittal.go deleted file mode 100644 index 24ec96f0..00000000 --- a/api/internal/model/submittal.go +++ /dev/null @@ -1,149 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type Submittal struct { - ID uuid.UUID `json:"id" db:"id"` - AlertConfigID uuid.UUID `json:"alert_config_id" db:"alert_config_id"` - AlertConfigName string `json:"alert_config_name" db:"alert_config_name"` - AlertTypeID uuid.UUID `json:"alert_type_id" db:"alert_type_id"` - AlertTypeName string `json:"alert_type_name" db:"alert_type_name"` - ProjectID uuid.UUID `json:"project_id" db:"project_id"` - SubmittalStatusID uuid.UUID `json:"submittal_status_id" db:"submittal_status_id"` - SubmittalStatusName string `json:"submittal_status_name" db:"submittal_status_name"` - CompletionDate *time.Time `json:"completion_date" db:"completion_date"` - CreateDate time.Time `json:"create_date" db:"create_date"` - DueDate time.Time `json:"due_date" db:"due_date"` - MarkedAsMissing bool `json:"marked_as_missing" db:"marked_as_missing"` - WarningSent bool `json:"warning_sent" db:"warning_sent"` -} - -const missingFilter = ` - AND completion_date IS NULL AND NOT marked_as_missing -` - -func (q *Queries) ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listProjectSubmittals := ` - SELECT * - FROM v_submittal - WHERE project_id = $1 - ` + filter + ` - ORDER BY due_date DESC, alert_type_name ASC - ` - - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listProjectSubmittals, projectID); err != nil { - return aa, err - } - return aa, nil -} - 
-func (q *Queries) ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listInstrumentSubmittals := ` - SELECT * - FROM v_submittal - WHERE id = ANY( - SELECT sub.id - FROM submittal sub - INNER JOIN alert_config_instrument aci ON aci.alert_config_id = sub.alert_config_id - WHERE aci.instrument_id = $1 - ) - ` + filter + ` - ORDER BY due_date DESC - ` - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listInstrumentSubmittals, instrumentID); err != nil { - return aa, err - } - return aa, nil -} - -func (q *Queries) ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]Submittal, error) { - var filter string - if showMissing { - filter = missingFilter - } - listAlertConfigSubmittals := ` - SELECT * - FROM v_submittal - WHERE alert_config_id = $1 - ` + filter + ` - ORDER BY due_date DESC - ` - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listAlertConfigSubmittals, alertConfigID); err != nil { - return aa, err - } - return aa, nil -} - -const listUnverifiedMissingSubmittals = ` - SELECT * - FROM v_submittal - WHERE completion_date IS NULL - AND NOT marked_as_missing - ORDER BY due_date DESC -` - -func (q *Queries) ListUnverifiedMissingSubmittals(ctx context.Context) ([]Submittal, error) { - aa := make([]Submittal, 0) - if err := q.db.SelectContext(ctx, &aa, listUnverifiedMissingSubmittals); err != nil { - return nil, err - } - return aa, nil -} - -const updateSubmittal = ` - UPDATE submittal SET - submittal_status_id = $2, - completion_date = $3, - warning_sent = $4 - WHERE id = $1 -` - -func (q *Queries) UpdateSubmittal(ctx context.Context, sub Submittal) error { - _, err := q.db.ExecContext(ctx, updateSubmittal, sub.ID, sub.SubmittalStatusID, sub.CompletionDate, sub.WarningSent) - return err -} - -const verifyMissingSubmittal = ` - UPDATE submittal SET - -- red 
submittal status - submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::UUID, - marked_as_missing = true - WHERE id = $1 - AND completion_date IS NULL - AND NOW() > due_date -` - -func (q *Queries) VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, verifyMissingSubmittal, submittalID) - return err -} - -const verifyMissingAlertConfigSubmittals = ` - UPDATE submittal SET - submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::UUID, - marked_as_missing = true - WHERE alert_config_id = $1 - AND completion_date IS NULL - AND NOW() > due_date -` - -func (q *Queries) VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, verifyMissingAlertConfigSubmittals, alertConfigID) - return err -} diff --git a/api/internal/model/timeseries.go b/api/internal/model/timeseries.go deleted file mode 100644 index 95e98a4b..00000000 --- a/api/internal/model/timeseries.go +++ /dev/null @@ -1,220 +0,0 @@ -package model - -import ( - "context" - "encoding/json" - - "github.com/USACE/instrumentation-api/api/internal/util" - "github.com/google/uuid" -) - -const ( - StandardTimeseriesType = "standard" - ConstantTimeseriesType = "constant" - ComputedTimeseriesType = "computed" - CwmsTimeseriesType = "cwms" -) - -type Timeseries struct { - ID uuid.UUID `json:"id"` - Slug string `json:"slug"` - Name string `json:"name"` - Variable string `json:"variable"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - InstrumentSlug string `json:"instrument_slug" db:"instrument_slug"` - Instrument string `json:"instrument,omitempty"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - Parameter string `json:"parameter,omitempty"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` - Unit string `json:"unit,omitempty"` - Values []Measurement `json:"values,omitempty"` - Type string `json:"type" db:"type"` - IsComputed bool `json:"is_computed" 
db:"is_computed"` -} - -type TimeseriesNote struct { - Masked *bool `json:"masked,omitempty"` - Validated *bool `json:"validated,omitempty"` - Annotation *string `json:"annotation,omitempty"` -} - -type TimeseriesCollectionItems struct { - Items []Timeseries -} - -func (c *TimeseriesCollectionItems) UnmarshalJSON(b []byte) error { - switch util.JSONType(b) { - case "ARRAY": - if err := json.Unmarshal(b, &c.Items); err != nil { - return err - } - case "OBJECT": - var t Timeseries - if err := json.Unmarshal(b, &t); err != nil { - return err - } - c.Items = []Timeseries{t} - default: - c.Items = make([]Timeseries, 0) - } - return nil -} - -var ( - unknownParameterID = uuid.MustParse("2b7f96e1-820f-4f61-ba8f-861640af6232") - unknownUnitID = uuid.MustParse("4a999277-4cf5-4282-93ce-23b33c65e2c8") -) - -const getStoredTimeseriesExists = ` - SELECT EXISTS (SELECT id FROM v_timeseries_stored WHERE id = $1) -` - -// ValidateStoredTimeseries returns an error if the timeseries id does not exist or the timeseries is computed -func (q *Queries) GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) { - var isStored bool - if err := q.db.GetContext(ctx, &isStored, getStoredTimeseriesExists, ×eriesID); err != nil { - return false, err - } - return isStored, nil -} - -const getTimeseriesProjectMap = ` - SELECT timeseries_id, project_id - FROM v_timeseries_project_map - WHERE timeseries_id IN (?) 
-` - -func (q *Queries) GetTimeseriesProjectMap(ctx context.Context, timeseriesIDs []uuid.UUID) (map[uuid.UUID]uuid.UUID, error) { - query, args, err := sqlIn(getTimeseriesProjectMap, timeseriesIDs) - if err != nil { - return nil, err - } - query = q.db.Rebind(query) - var result []struct { - TimeseriesID uuid.UUID `db:"timeseries_id"` - ProjectID uuid.UUID `db:"project_id"` - } - if err = q.db.SelectContext(ctx, &result, query, args...); err != nil { - return nil, err - } - m := make(map[uuid.UUID]uuid.UUID) - for _, r := range result { - m[r.TimeseriesID] = r.ProjectID - } - return m, nil -} - -const listProjectTimeseries = ` - SELECT t.* FROM v_timeseries t - INNER JOIN project_instrument p ON p.instrument_id = t.instrument_id - WHERE p.project_id = $1 -` - -// ListProjectTimeseries lists all timeseries for a given project -func (q *Queries) ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listProjectTimeseries, projectID); err != nil { - return make([]Timeseries, 0), err - } - - return tt, nil -} - -const listInstrumentTimeseries = ` - SELECT * FROM v_timeseries - WHERE instrument_id = $1 -` - -func (q *Queries) ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.Select(&tt, listInstrumentTimeseries, instrumentID); err != nil { - return nil, err - } - return tt, nil -} - -const listPlotConfigTimeseries = ` - SELECT t.* FROM v_timeseries t - INNER JOIN plot_configuration_timeseries pct ON pct.timeseries_id = t.id - WHERE pct.plot_configuration_id = $1 -` - -func (q *Queries) ListPlotConfigTimeseries(ctx context.Context, plotConfigID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.Select(&tt, listPlotConfigTimeseries, plotConfigID); err != nil { - return nil, err - } - return tt, nil -} - -const listInstrumentGroupTimeseries = ` - 
SELECT t.* FROM v_timeseries t - INNER JOIN instrument_group_instruments gi ON gi.instrument_id = t.instrument_id - WHERE gi.instrument_group_id = $1 -` - -func (q *Queries) ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]Timeseries, error) { - tt := make([]Timeseries, 0) - if err := q.db.SelectContext(ctx, &tt, listInstrumentGroupTimeseries, instrumentGroupID); err != nil { - return nil, err - } - return tt, nil -} - -const getTimeseries = ` - SELECT * FROM v_timeseries WHERE id = $1 -` - -func (q *Queries) GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (Timeseries, error) { - var t Timeseries - err := q.db.GetContext(ctx, &t, getTimeseries, timeseriesID) - return t, err -} - -const createTimeseries = ` - INSERT INTO timeseries (instrument_id, slug, name, parameter_id, unit_id, type) - VALUES ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) - RETURNING id, instrument_id, slug, name, parameter_id, unit_id, type -` - -func (q *Queries) CreateTimeseries(ctx context.Context, ts Timeseries) (Timeseries, error) { - if ts.ParameterID == uuid.Nil { - ts.ParameterID = unknownParameterID - } - if ts.UnitID == uuid.Nil { - ts.UnitID = unknownUnitID - } - if ts.Type == "" { - ts.Type = StandardTimeseriesType - } - var tsNew Timeseries - err := q.db.GetContext(ctx, &tsNew, createTimeseries, ts.InstrumentID, ts.Name, ts.ParameterID, ts.UnitID, ts.Type) - return tsNew, err -} - -const updateTimeseries = ` - UPDATE timeseries SET name = $2, instrument_id = $3, parameter_id = $4, unit_id = $5 - WHERE id = $1 - RETURNING id -` - -func (q *Queries) UpdateTimeseries(ctx context.Context, ts Timeseries) (uuid.UUID, error) { - if ts.ParameterID == uuid.Nil { - ts.ParameterID = unknownParameterID - } - if ts.UnitID == uuid.Nil { - ts.UnitID = unknownUnitID - } - var tID uuid.UUID - err := q.db.GetContext(ctx, &tID, updateTimeseries, ts.ID, ts.Name, ts.InstrumentID, ts.ParameterID, ts.UnitID) - return tID, err -} - -const deleteTimeseries = ` 
- DELETE FROM timeseries WHERE id = $1 -` - -func (q *Queries) DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteTimeseries, timeseriesID) - return err -} diff --git a/api/internal/model/timeseries_calculated.go b/api/internal/model/timeseries_calculated.go deleted file mode 100644 index c97318dd..00000000 --- a/api/internal/model/timeseries_calculated.go +++ /dev/null @@ -1,140 +0,0 @@ -package model - -import ( - "context" - - "github.com/google/uuid" -) - -type CalculatedTimeseries struct { - ID uuid.UUID `json:"id" db:"id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - ParameterID uuid.UUID `json:"parameter_id" db:"parameter_id"` - UnitID uuid.UUID `json:"unit_id" db:"unit_id"` - Slug string `json:"slug" db:"slug"` - FormulaName string `json:"formula_name" db:"formula_name"` - Formula string `json:"formula" db:"formula"` -} - -const listCalculatedTimeseriesSQL = ` - SELECT - id, - instrument_id, - parameter_id, - unit_id, - slug, - name AS formula_name, - COALESCE(contents, '') AS formula - FROM v_timeseries_computed -` - -const getAllCalculatedTimeseriesForInstrument = listCalculatedTimeseriesSQL + ` - WHERE instrument_id = $1 -` - -// GetAllCalculationsForInstrument returns all formulas associated to a given instrument ID. 
-func (q *Queries) GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]CalculatedTimeseries, error) { - cc := make([]CalculatedTimeseries, 0) - if err := q.db.SelectContext(ctx, &cc, getAllCalculatedTimeseriesForInstrument, instrumentID); err != nil { - return nil, err - } - return cc, nil -} - -const createCalculatedTimeseries = ` - INSERT INTO timeseries ( - instrument_id, - parameter_id, - unit_id, - slug, - name, - type - ) VALUES ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') - RETURNING id -` - -func (q *Queries) CreateCalculatedTimeseries(ctx context.Context, cc CalculatedTimeseries) (uuid.UUID, error) { - if cc.ParameterID == uuid.Nil { - cc.ParameterID = unknownParameterID - } - if cc.UnitID == uuid.Nil { - cc.UnitID = unknownUnitID - } - var tsID uuid.UUID - err := q.db.GetContext(ctx, &tsID, createCalculatedTimeseries, &cc.InstrumentID, &cc.ParameterID, &cc.UnitID, &cc.FormulaName) - return tsID, err -} - -const createCalculation = ` - INSERT INTO calculation (timeseries_id, contents) VALUES ($1,$2) -` - -func (q *Queries) CreateCalculation(ctx context.Context, timeseriesID uuid.UUID, contents string) error { - _, err := q.db.ExecContext(ctx, createCalculation, timeseriesID, contents) - return err -} - -const getOneCalculation = listCalculatedTimeseriesSQL + ` - WHERE id = $1 -` - -func (q *Queries) GetOneCalculation(ctx context.Context, calculationID *uuid.UUID) (CalculatedTimeseries, error) { - var defaultCc CalculatedTimeseries - err := q.db.GetContext(ctx, &defaultCc, getOneCalculation, calculationID) - return defaultCc, err -} - -const createOrUpdateCalculation = ` - INSERT INTO calculation (timeseries_id, contents) VALUES ($1, $2) - ON CONFLICT (timeseries_id) DO UPDATE SET contents = COALESCE(EXCLUDED.contents, $3) -` - -func (q *Queries) CreateOrUpdateCalculation(ctx context.Context, timeseriesID uuid.UUID, formula, defaultFormula string) error { - _, err := q.db.ExecContext(ctx, 
createOrUpdateCalculation, timeseriesID, formula, defaultFormula) - return err -} - -const deleteCalculatedTimeseries = ` - DELETE FROM timeseries WHERE id = $1 AND id IN (SELECT timeseries_id FROM calculation) -` - -func (q *Queries) DeleteCalculatedTimeseries(ctx context.Context, calculationID uuid.UUID) error { - _, err := q.db.ExecContext(ctx, deleteCalculatedTimeseries, calculationID) - return err -} - -const createOrUpdateCalculatedTimeseries = ` - INSERT INTO timeseries ( - id, - instrument_id, - parameter_id, - unit_id, - slug, - name, - type - ) VALUES ($1, $2, $3, $4, slugify($5, 'timeseries'), $5, 'computed') - ON CONFLICT (id) DO UPDATE SET - instrument_id = COALESCE(EXCLUDED.instrument_id, $6), - parameter_id = COALESCE(EXCLUDED.parameter_id, $7), - unit_id = COALESCE(EXCLUDED.unit_id, $8), - slug = COALESCE(EXCLUDED.slug, slugify($9, 'timeseries')), - name = COALESCE(EXCLUDED.name, $9), - type = 'computed' -` - -func (q *Queries) CreateOrUpdateCalculatedTimeseries(ctx context.Context, cc CalculatedTimeseries, defaultCc CalculatedTimeseries) error { - if _, err := q.db.ExecContext(ctx, createOrUpdateCalculatedTimeseries, - cc.ID, - cc.InstrumentID, - cc.ParameterID, - cc.UnitID, - cc.FormulaName, - defaultCc.InstrumentID, - defaultCc.ParameterID, - defaultCc.UnitID, - defaultCc.FormulaName, - ); err != nil { - return err - } - return nil -} diff --git a/api/internal/model/timeseries_cwms.go b/api/internal/model/timeseries_cwms.go deleted file mode 100644 index 84072142..00000000 --- a/api/internal/model/timeseries_cwms.go +++ /dev/null @@ -1,66 +0,0 @@ -package model - -import ( - "context" - "time" - - "github.com/google/uuid" -) - -type TimeseriesCwms struct { - Timeseries - CwmsTimeseriesID string `json:"cwms_timeseries_id" db:"cwms_timeseries_id"` - CwmsOfficeID string `json:"cwms_office_id" db:"cwms_office_id"` - CwmsExtentEarliestTime time.Time `json:"cwms_extent_earliest_time" db:"cwms_extent_earliest_time"` - CwmsExtentLatestTime *time.Time 
`json:"cwms_extent_latest_time" db:"cwms_extent_latest_time"` -} - -const listTimeseriesCwms = ` - SELECT * FROM v_timeseries_cwms - WHERE instrument_id = $1 -` - -func (q *Queries) ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]TimeseriesCwms, error) { - tss := make([]TimeseriesCwms, 0) - err := q.db.SelectContext(ctx, &tss, listTimeseriesCwms, instrumentID) - return tss, err -} - -const getTimeseriesCwms = ` - SELECT * FROM v_timeseries_cwms - WHERE id = $1 -` - -func (q *Queries) GetTimeseriesCwms(ctx context.Context, timeseriesID uuid.UUID) (TimeseriesCwms, error) { - var t TimeseriesCwms - err := q.db.GetContext(ctx, &t, getTimeseriesCwms, timeseriesID) - return t, err -} - -const createTimeseriesCwms = ` - INSERT INTO timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) VALUES - ($1, $2, $3, $4, $5) -` - -func (q *Queries) CreateTimeseriesCwms(ctx context.Context, tsCwms TimeseriesCwms) error { - _, err := q.db.ExecContext(ctx, createTimeseriesCwms, - tsCwms.ID, tsCwms.CwmsTimeseriesID, tsCwms.CwmsOfficeID, tsCwms.CwmsExtentEarliestTime, tsCwms.CwmsExtentLatestTime, - ) - return err -} - -const updateTimeseriesCwms = ` - UPDATE timeseries_cwms SET - cwms_timeseries_id=$2, - cwms_office_id=$3, - cwms_extent_earliest_time=$4, - cwms_extent_latest_time=$5 - WHERE timeseries_id=$1 -` - -func (q *Queries) UpdateTimeseriesCwms(ctx context.Context, tsCwms TimeseriesCwms) error { - _, err := q.db.ExecContext(ctx, updateTimeseriesCwms, - tsCwms.ID, tsCwms.CwmsTimeseriesID, tsCwms.CwmsOfficeID, tsCwms.CwmsExtentEarliestTime, tsCwms.CwmsExtentLatestTime, - ) - return err -} diff --git a/api/internal/model/timeseries_process.go b/api/internal/model/timeseries_process.go deleted file mode 100644 index d2bd6e5f..00000000 --- a/api/internal/model/timeseries_process.go +++ /dev/null @@ -1,564 +0,0 @@ -package model - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "log" 
- "strconv" - "time" - - "github.com/Knetic/govaluate" - "github.com/google/uuid" - "github.com/jmoiron/sqlx/types" - "github.com/tidwall/btree" -) - -type ProcessTimeseriesInfo struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` - Variable string `json:"variable" db:"variable"` - IsComputed bool `json:"is_computed" db:"is_computed"` - Formula *string `json:"formula" db:"formula"` -} - -type DBProcessTimeseries struct { - ProcessTimeseriesInfo - Measurements string `json:"measurements" db:"measurements"` - NextMeasurementLow *string `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *string `json:"next_measurement_high" db:"next_measurement_high"` -} - -type ProcessTimeseries struct { - ProcessTimeseriesInfo - Measurements []ProcessMeasurement `json:"measurements" db:"measurements"` - NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` - TimeWindow TimeWindow `json:"time_window"` -} - -type ProcessMeasurementCollection struct { - TimeseriesID uuid.UUID `json:"timeseries_id" db:"timeseries_id"` - Items []ProcessMeasurement `json:"items"` -} - -type ProcessTimeseriesResponseCollection []ProcessTimeseries - -type ProcessMeasurement struct { - Time time.Time `json:"time"` - Value float64 `json:"value"` - Error string `json:"error,omitempty"` -} - -func (m ProcessMeasurement) Lean() map[time.Time]float64 { - return map[time.Time]float64{m.Time: m.Value} -} - -type ProcessInclinometerTimeseries struct { - ProcessTimeseriesInfo - Measurements []ProcessInclinometerMeasurement `json:"measurements" db:"measurements"` - NextMeasurementLow *ProcessMeasurement `json:"next_measurement_low" db:"next_measurement_low"` - NextMeasurementHigh *ProcessMeasurement `json:"next_measurement_high" db:"next_measurement_high"` - 
TimeWindow TimeWindow `json:"time_window"` -} - -type ProcessInclinometerMeasurement struct { - Time time.Time `json:"time"` - Values types.JSONText `json:"values"` -} - -type ProcessInclinometerTimeseriesResponseCollection []ProcessInclinometerTimeseries - -func (m ProcessInclinometerMeasurement) InclinometerLean() map[time.Time]types.JSONText { - return map[time.Time]types.JSONText{m.Time: m.Values} -} - -// explorerResponseFactory returns the explorer-specific JSON response format -func explorerInclinometerResponseFactory(tt []ProcessInclinometerTimeseries) (map[uuid.UUID][]InclinometerMeasurementCollectionLean, error) { - response := make(map[uuid.UUID][]InclinometerMeasurementCollectionLean) - - for _, t := range tt { - if _, hasInstrument := response[t.InstrumentID]; !hasInstrument { - response[t.InstrumentID] = make([]InclinometerMeasurementCollectionLean, 0) - } - mcl := InclinometerMeasurementCollectionLean{ - TimeseriesID: t.TimeseriesID, - Items: make([]InclinometerMeasurementLean, len(t.Measurements)), - } - for idx, m := range t.Measurements { - mcl.Items[idx] = m.InclinometerLean() - } - response[t.InstrumentID] = append(response[t.InstrumentID], mcl) - } - - return response, nil -} - -// ProcessMeasurementFilter for conveniently passsing SQL query paramters to functions -type ProcessMeasurementFilter struct { - TimeseriesID *uuid.UUID `db:"timeseries_id"` - InstrumentID *uuid.UUID `db:"instrument_id"` - InstrumentGroupID *uuid.UUID `db:"instrument_group_id"` - InstrumentIDs []uuid.UUID `db:"instrument_ids"` - TimeseriesIDs []uuid.UUID `db:"timeseries_ids"` - After time.Time `db:"after"` - Before time.Time `db:"before"` -} - -// BTreeNode represents node for btree used for computing timeseries -type BTreeNode struct { - Key time.Time - Value map[string]interface{} -} - -func (mrc *ProcessTimeseriesResponseCollection) GroupByInstrument(threshold int) (map[uuid.UUID][]MeasurementCollectionLean, error) { - if len(*mrc) == 0 { - return 
make(map[uuid.UUID][]MeasurementCollectionLean), nil - } - - tmp := make(map[uuid.UUID]map[uuid.UUID][]MeasurementLean) - - for _, t := range *mrc { - if _, hasInstrument := tmp[t.InstrumentID]; !hasInstrument { - tmp[t.InstrumentID] = make(map[uuid.UUID][]MeasurementLean, 0) - } - if _, hasTimeseries := tmp[t.InstrumentID][t.TimeseriesID]; !hasTimeseries { - tmp[t.InstrumentID][t.TimeseriesID] = make([]MeasurementLean, 0) - } - for _, m := range t.Measurements { - tmp[t.InstrumentID][t.TimeseriesID] = append(tmp[t.InstrumentID][t.TimeseriesID], MeasurementLean{m.Time: m.Value}) - } - } - - res := make(map[uuid.UUID][]MeasurementCollectionLean) - - for instrumentID := range tmp { - res[instrumentID] = make([]MeasurementCollectionLean, 0) - - for tsID := range tmp[instrumentID] { - res[instrumentID] = append(res[instrumentID], - MeasurementCollectionLean{ - TimeseriesID: tsID, - Items: LTTB(tmp[instrumentID][tsID], threshold), - }, - ) - } - } - - return res, nil -} - -func (mrc *ProcessInclinometerTimeseriesResponseCollection) GroupByInstrument() (map[uuid.UUID][]InclinometerMeasurementCollectionLean, error) { - if len(*mrc) == 0 { - return make(map[uuid.UUID][]InclinometerMeasurementCollectionLean), sql.ErrNoRows - } - - res := make(map[uuid.UUID][]InclinometerMeasurementCollectionLean) - - for _, t := range *mrc { - if _, hasInstrument := res[t.InstrumentID]; !hasInstrument { - res[t.InstrumentID] = make([]InclinometerMeasurementCollectionLean, 0) - } - mcl := InclinometerMeasurementCollectionLean{ - TimeseriesID: t.TimeseriesID, - Items: make([]InclinometerMeasurementLean, len(t.Measurements)), - } - for idx, m := range t.Measurements { - mcl.Items[idx] = m.InclinometerLean() - } - res[t.InstrumentID] = append(res[t.InstrumentID], mcl) - } - return res, nil -} - -func (mrc *ProcessTimeseriesResponseCollection) CollectSingleTimeseries(threshold int, tsID uuid.UUID) (MeasurementCollection, error) { - if len(*mrc) == 0 { - return MeasurementCollection{ - 
TimeseriesID: tsID, - Items: make([]Measurement, 0), - }, nil - } - - for _, t := range *mrc { - if t.TimeseriesID == tsID { - mmts := make([]Measurement, len(t.Measurements)) - for i, m := range t.Measurements { - mmts[i] = Measurement{ - TimeseriesID: t.TimeseriesID, - Time: m.Time, - Value: FloatNanInf(m.Value), - Error: m.Error, - } - } - return MeasurementCollection{TimeseriesID: t.TimeseriesID, Items: LTTB(mmts, threshold)}, nil - } - } - - return MeasurementCollection{}, fmt.Errorf("requested timeseries does not match any in the result") -} - -// SelectMeasurements returns measurements for the timeseries specified in the filter -func (q *Queries) SelectMeasurements(ctx context.Context, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { - tss, err := queryTimeseriesMeasurements(ctx, q, f) - if err != nil { - return tss, err - } - tss, err = processLOCF(tss) - if err != nil { - return tss, err - } - return tss, nil -} - -// SelectInclinometerMeasurements returns inclinometer measurements for the instruments specified in the filter -func (q *Queries) SelectInclinometerMeasurements(ctx context.Context, f ProcessMeasurementFilter) (ProcessInclinometerTimeseriesResponseCollection, error) { - tss, err := queryInclinometerTimeseriesMeasurements(ctx, q, f) - if err != nil { - return tss, err - } - return tss, nil -} - -// collectAggregate creates a btree of all sorted times (key) and measurements (value; as variable map) from an array of Timeseries -func collectAggregate(tss *ProcessTimeseriesResponseCollection) *btree.BTreeG[BTreeNode] { - // Get unique set of all measurement times of timeseries dependencies for non-regularized values - btm := btree.NewBTreeG(func(a, b BTreeNode) bool { return a.Key.Before(b.Key) }) - for _, ts := range *tss { - if ts.NextMeasurementLow != nil { - if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementLow.Time}); !exists { - btm.Set(BTreeNode{Key: ts.NextMeasurementLow.Time, Value: 
map[string]interface{}{ts.Variable: ts.NextMeasurementLow.Value}}) - } else { - item.Value[ts.Variable] = ts.NextMeasurementLow.Value - btm.Set(item) - } - } - for _, m := range ts.Measurements { - if item, exists := btm.Get(BTreeNode{Key: m.Time}); !exists { - btm.Set(BTreeNode{Key: m.Time, Value: map[string]interface{}{ts.Variable: m.Value}}) - } else { - item.Value[ts.Variable] = m.Value - btm.Set(item) - } - } - if ts.NextMeasurementHigh != nil { - if item, exists := btm.Get(BTreeNode{Key: ts.NextMeasurementHigh.Time}); !exists { - btm.Set(BTreeNode{Key: ts.NextMeasurementHigh.Time, Value: map[string]interface{}{ts.Variable: ts.NextMeasurementHigh.Value}}) - } else { - item.Value[ts.Variable] = ts.NextMeasurementHigh.Value - btm.Set(item) - } - } - } - return btm -} - -// processLOCF calculates computed timeseries using "Last-Observation-Carried-Forward" algorithm -func processLOCF(tss ProcessTimeseriesResponseCollection) (ProcessTimeseriesResponseCollection, error) { - tssFinal := make(ProcessTimeseriesResponseCollection, 0) - var variableMap *btree.BTreeG[BTreeNode] - // Check if any computed timeseries present, collect aggregates used for calculations if so - for _, ts := range tss { - if ts.IsComputed { - variableMap = collectAggregate(&tss) - break - } - } - // Add any stored timeseries to the result - // Do calculations for computed timeseries and add to result - for _, ts := range tss { - // Array of existing measurements - a1 := make([]ProcessMeasurement, 0) - if ts.NextMeasurementLow != nil { - a1 = append(a1, *ts.NextMeasurementLow) - } - a1 = append(a1, ts.Measurements...) 
- if ts.NextMeasurementHigh != nil { - a1 = append(a1, *ts.NextMeasurementHigh) - } - - // Could do some additional checks before adding, like if the - // timeseries was actual requested or if it was just in the result as a - // dependency of the computed timeseries, just returning them all for now - if !ts.IsComputed { - tssFinal = append(tssFinal, ProcessTimeseries{ - ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, - Measurements: a1, - TimeWindow: ts.TimeWindow, - }) - continue - } - - // By now, all of the stored timeseries have been processed; - // the query is ordered in a way that priortizes stored timeseries - expr, err := govaluate.NewEvaluableExpression(*ts.Formula) - if err != nil { - continue - } - - // Do calculations - remember := make(map[string]interface{}) - a2 := make([]ProcessMeasurement, 0) - - it := variableMap.Iter() - for it.Next() { - item := it.Item() - - // fill in any missing gaps of data - for k, v := range remember { - if _, exists := item.Value[k]; !exists { - item.Value[k] = v - } - } - // Add/Update the most recent values - for k, v := range item.Value { - remember[k] = v - } - - val, err := expr.Evaluate(item.Value) - if err != nil { - continue - } - val64, err := strconv.ParseFloat(fmt.Sprint(val), 64) - if err != nil { - continue - } - - a2 = append(a2, ProcessMeasurement{Time: item.Key, Value: val64}) - } - it.Release() - - tssFinal = append(tssFinal, ProcessTimeseries{ - ProcessTimeseriesInfo: ts.ProcessTimeseriesInfo, - Measurements: a2, - TimeWindow: ts.TimeWindow, - }) - } - - return tssFinal, nil -} - -// SelectTimeseriesMeasurements selects stored measurements and dependencies for computed measurements -func queryTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasurementFilter) (ProcessTimeseriesResponseCollection, error) { - var filterSQL string - var filterArg interface{} - // short circuiting before executing SQL query greatly improves query perfomance, - // rather than adding all parameters to the 
query with logical OR - if f.TimeseriesID != nil { - filterSQL = `id = ?` - filterArg = f.TimeseriesID - } else if f.InstrumentID != nil { - filterSQL = `instrument_id = ?` - filterArg = f.InstrumentID - } else if f.InstrumentGroupID != nil { - filterSQL = ` - instrument_id IN ( - SELECT instrument_id - FROM instrument_group_instruments - WHERE instrument_group_id = ? - )` - filterArg = f.InstrumentGroupID - } else if len(f.InstrumentIDs) > 0 { - filterSQL = `instrument_id IN (?)` - filterArg = f.InstrumentIDs - } else if len(f.TimeseriesIDs) > 0 { - filterSQL = `id IN (?)` - filterArg = f.TimeseriesIDs - } else { - return nil, fmt.Errorf("must supply valid filter for timeseries_measurement query") - } - listTimeseriesMeasurments := ` - WITH required_timeseries AS ( - ( - SELECT id - FROM v_timeseries_stored - WHERE ` + filterSQL + ` - ) - UNION ALL - ( - SELECT dependency_timeseries_id AS id - FROM v_timeseries_dependency - WHERE ` + filterSQL + ` - ) - ), - next_low AS ( - SELECT nlm.timeseries_id AS timeseries_id, json_build_object('time', nlm.time, 'value', m1.value) AS measurement - FROM ( - SELECT timeseries_id, MAX(time) AS time - FROM timeseries_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time < ? - GROUP BY timeseries_id - ) nlm - INNER JOIN timeseries_measurement m1 ON m1.time = nlm.time AND m1.timeseries_id = nlm.timeseries_id - ), - next_high AS ( - SELECT nhm.timeseries_id AS timeseries_id, json_build_object('time', nhm.time, 'value', m2.value) AS measurement - FROM ( - SELECT timeseries_id, MIN(time) AS time - FROM timeseries_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time > ? - GROUP BY timeseries_id - ) nhm - INNER JOIN timeseries_measurement m2 ON m2.time = nhm.time AND m2.timeseries_id = nhm.timeseries_id - ) - ( - SELECT - rt.id AS timeseries_id, - ts.instrument_id AS instrument_id, - i.slug || '.' 
|| ts.slug AS variable, - false AS is_computed, - null AS formula, - COALESCE(( - SELECT json_agg(json_build_object('time', time, 'value', value) ORDER BY time ASC)::text - FROM timeseries_measurement - WHERE timeseries_id = rt.id AND time >= ? AND time <= ? - ), '[]') AS measurements, - nl.measurement::text AS next_measurement_low, - nh.measurement::text AS next_measurement_high - FROM required_timeseries rt - INNER JOIN timeseries ts ON ts.id = rt.id - INNER JOIN instrument i ON i.id = ts.instrument_id - LEFT JOIN next_low nl ON nl.timeseries_id = rt.id - LEFT JOIN next_high nh ON nh.timeseries_id = rt.id - ) - UNION ALL - ( - SELECT - id AS timeseries_id, - instrument_id AS instrument_id, - slug AS variable, - true AS is_computed, - contents AS formula, - '[]'::text AS measurements, - null AS next_measurement_low, - null AS next_measurement_high - FROM v_timeseries_computed - WHERE ` + filterSQL + ` AND contents IS NOT NULL - ) - ORDER BY is_computed - ` - query, args, err := sqlIn(listTimeseriesMeasurments, filterArg, filterArg, f.After, f.Before, f.After, f.Before, filterArg) - if err != nil { - return nil, err - } - query = q.db.Rebind(query) - tt := make([]DBProcessTimeseries, 0) - if err := q.db.SelectContext(ctx, &tt, query, args...); err != nil { - return make(ProcessTimeseriesResponseCollection, 0), err - } - tt2 := make(ProcessTimeseriesResponseCollection, len(tt)) - for idx, t := range tt { - tt2[idx] = ProcessTimeseries{ - ProcessTimeseriesInfo: t.ProcessTimeseriesInfo, - Measurements: make([]ProcessMeasurement, 0), - TimeWindow: TimeWindow{After: f.After, Before: f.Before}, - } - if err := json.Unmarshal([]byte(t.Measurements), &tt2[idx].Measurements); err != nil { - log.Println(err) - } - if t.NextMeasurementHigh != nil { - if err := json.Unmarshal([]byte(*t.NextMeasurementHigh), &tt2[idx].NextMeasurementHigh); err != nil { - log.Println(err) - } - } - if t.NextMeasurementLow != nil { - if err := json.Unmarshal([]byte(*t.NextMeasurementLow), 
&tt2[idx].NextMeasurementLow); err != nil { - log.Println(err) - } - } - } - return tt2, nil -} - -// ComputedInclinometerTimeseries returns computed and stored inclinometer timeseries for a specified array of instrument IDs -func queryInclinometerTimeseriesMeasurements(ctx context.Context, q *Queries, f ProcessMeasurementFilter) ([]ProcessInclinometerTimeseries, error) { - tt := make([]DBProcessTimeseries, 0) - listInclinometerTimeseriesMeasurements := ` - -- Get Timeseries and Dependencies for Calculations - -- timeseries required based on requested instrument - WITH requested_instruments AS ( - SELECT id - FROM instrument - WHERE id IN (?) - ), required_timeseries AS ( - -- Timeseries for Instrument - SELECT id FROM v_timeseries_stored WHERE instrument_id IN (SELECT id FROM requested_instruments) - UNION - -- Dependencies for Instrument Timeseries - SELECT dependency_timeseries_id AS id - FROM v_timeseries_dependency - WHERE instrument_id IN (SELECT id from requested_instruments) - ), - -- Measurements Within Time Window by timeseries_id; - measurements AS ( - SELECT timeseries_id, - json_agg(json_build_object('time', time, 'values', values) ORDER BY time ASC)::text AS measurements - FROM inclinometer_measurement - WHERE timeseries_id IN (SELECT id FROM required_timeseries) AND time >= ? AND time <= ? - GROUP BY timeseries_id - ) - -- Stored Timeseries - SELECT - rt.id AS timeseries_id, - ts.instrument_id AS instrument_id, - i.slug || '.' 
|| ts.slug AS variable, - false AS is_computed, - null AS formula, - COALESCE(m.measurements, '[]') AS measurements - FROM required_timeseries rt - INNER JOIN timeseries ts ON ts.id = rt.id - INNER JOIN instrument i ON i.id = ts.instrument_id AND i.id IN (SELECT id FROM requested_instruments) - LEFT JOIN measurements m ON m.timeseries_id = rt.id - UNION - -- Computed Timeseries - SELECT - cc.id AS timeseries_id, - cc.instrument_id AS instrument_id, - cc.name AS variable, - true AS is_computed, - cc.contents AS formula, - '[]'::text AS measurements - FROM v_timeseries_computed cc - WHERE cc.contents IS NOT NULL AND cc.instrument_id IN (SELECT id FROM requested_instruments) - ORDER BY is_computed - ` - - query, args, err := sqlIn(listInclinometerTimeseriesMeasurements, f.InstrumentIDs, f.After, f.Before) - if err != nil { - return make([]ProcessInclinometerTimeseries, 0), err - } - query = q.db.Rebind(query) - if err := q.db.Select(&tt, query, args...); err != nil { - return make([]ProcessInclinometerTimeseries, 0), err - } - - // Unmarshal JSON Strings - tt2 := make([]ProcessInclinometerTimeseries, len(tt)) - for idx, t := range tt { - tt2[idx] = ProcessInclinometerTimeseries{ - ProcessTimeseriesInfo: t.ProcessTimeseriesInfo, - Measurements: make([]ProcessInclinometerMeasurement, 0), - TimeWindow: TimeWindow{After: f.After, Before: f.Before}, - } - cm, err := q.GetTimeseriesConstantMeasurement(ctx, t.TimeseriesID, "inclinometer-constant") - if err != nil { - return nil, err - } - if err := json.Unmarshal([]byte(t.Measurements), &tt2[idx].Measurements); err != nil { - log.Println(err) - } - for i := range tt2[idx].Measurements { - values, err := q.ListInclinometerMeasurementValues(ctx, t.TimeseriesID, tt2[idx].Measurements[i].Time, float64(cm.Value)) - if err != nil { - return nil, err - } - - jsonValues, err := json.Marshal(values) - if err != nil { - return nil, err - } - tt2[idx].Measurements[i].Values = jsonValues - } - } - return tt2, nil -} diff --git 
a/api/internal/server/api.go b/api/internal/server/api.go index cea68cdb..09445892 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -91,12 +91,12 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Alert r.public.GET("/projects/:project_id/instruments/:instrument_id/alerts", h.ListAlertsForInstrument) - r.private.GET("/my_alerts", h.ListMyAlerts) + r.private.GET("/my_alerts", h.ListAlertsForProfile) r.private.POST("/my_alerts/:alert_id/read", h.DoAlertRead) r.private.POST("/my_alerts/:alert_id/unread", h.DoAlertUnread) //AlertConfig - r.public.GET("/projects/:project_id/alert_configs", h.GetAllAlertConfigsForProject) + r.public.GET("/projects/:project_id/alert_configs", h.ListAlertConfigsForProject) r.public.GET("/projects/:project_id/instruments/:instrument_id/alert_configs", h.ListInstrumentAlertConfigs) r.public.GET("/projects/:project_id/alert_configs/:alert_config_id", h.GetAlertConfig) r.private.POST("/projects/:project_id/alert_configs", h.CreateAlertConfig) @@ -123,6 +123,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/projects/:project_id/collection_groups/:collection_group_id", h.UpdateCollectionGroup) r.private.DELETE("/projects/:project_id/collection_groups/:collection_group_id", h.DeleteCollectionGroup) r.private.POST("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.AddTimeseriesToCollectionGroup) + r.private.PUT("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.UpdateTimeseriesCollectionGroupSortOrder) r.private.DELETE("/projects/:project_id/collection_groups/:collection_group_id/timeseries/:timeseries_id", h.RemoveTimeseriesFromCollectionGroup) // Datalogger @@ -140,8 +141,9 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.public.GET("/projects/:project_id/district_rollup/measurement_submittals", h.ListProjectMeasurementDistrictRollup) // Domain - r.public.GET("/domains", 
h.GetDomains) + r.public.GET("/domains", h.ListDomains) r.public.GET("/domains/map", h.GetDomainMap) + r.public.GET("/domains/timezones", h.ListTimezoneOptions) // EquivalencyTable r.private.GET("/datalogger/:datalogger_id/tables/:datalogger_table_id/equivalency_table", h.GetEquivalencyTable) @@ -161,7 +163,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // Explorer r.public.POST("/explorer", h.ListTimeseriesMeasurementsExplorer) - r.public.POST("/inclinometer_explorer", h.ListInclinometerTimeseriesMeasurementsExplorer) // Heartbeat r.public.GET("/health", h.Healthcheck) @@ -173,7 +174,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.public.GET("/home", h.GetHome) // Instrument - r.public.GET("/instruments", h.ListInstruments) r.public.GET("/instruments/count", h.GetInstrumentCount) r.public.GET("/instruments/:instrument_id", h.GetInstrument) r.public.GET("/instruments/:instrument_id/timeseries_measurements", h.ListTimeseriesMeasurementsByInstrument) @@ -207,7 +207,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/instrument_groups/:instrument_group_id/instruments/:instrument_id", h.DeleteInstrumentGroupInstruments) // InstrumentNote - r.public.GET("/instruments/notes", h.ListInstrumentNotes) r.public.GET("/instruments/notes/:note_id", h.GetInstrumentNote) r.public.GET("/instruments/:instrument_id/notes", h.ListInstrumentInstrumentNotes) r.public.GET("/instruments/:instrument_id/notes/:note_id", h.GetInstrumentNote) @@ -224,7 +223,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/instruments/:instrument_id/status/:status_id", h.DeleteInstrumentStatus) // IpiInstruemnt - r.public.GET("/instruments/ipi/:instrument_id/segments", h.GetAllIpiSegmentsForInstrument) + r.public.GET("/instruments/ipi/:instrument_id/segments", h.ListIpiSegmentsForInstrument) r.public.GET("/instruments/ipi/:instrument_id/measurements", h.GetIpiMeasurementsForInstrument) 
r.private.PUT("/instruments/ipi/:instrument_id/segments", h.UpdateIpiSegments) @@ -235,16 +234,13 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.DELETE("/timeseries/:timeseries_id/measurements", h.DeleteTimeserieMeasurements) // InclinometerMeasurement - r.public.GET("/timeseries/:timeseries_id/inclinometer_measurements", h.ListInclinometerMeasurements) - r.private.POST("/projects/:project_id/inclinometer_measurements", h.CreateOrUpdateProjectInclinometerMeasurements) - r.private.DELETE("/timeseries/:timeseries_id/inclinometer_measurements", h.DeleteInclinometerMeasurements) + r.public.GET("/instruments/incl/:instrument_id/segments", h.ListInclSegmentsForInstrument) + r.public.GET("/instruments/incl/:instrument_id/measurements", h.GetInclMeasurementsForInstrument) + r.private.PUT("/instruments/incl/:instrument_id/segments", h.UpdateInclSegments) // Media r.public.GET("/projects/:project_slug/images/*", h.GetMedia) - // Opendcs - r.public.GET("/opendcs/sites", h.ListOpendcsSites) - // PlotConfig r.public.GET("/projects/:project_id/plot_configs", h.ListPlotConfigs) r.public.GET("/projects/:project_id/plot_configs/:plot_configuration_id", h.GetPlotConfig) @@ -308,10 +304,10 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.GET("/projects/:project_id/report_configs/:report_config_id/jobs/:job_id/downloads", h.DownloadReport) // Search - r.public.GET("/search/:entity", h.Search) + r.public.GET("/search/:entity", h.ProjectSearch) // SaaInstrument - r.public.GET("/instruments/saa/:instrument_id/segments", h.GetAllSaaSegmentsForInstrument) + r.public.GET("/instruments/saa/:instrument_id/segments", h.ListSaaSegmentsForInstrument) r.public.GET("/instruments/saa/:instrument_id/measurements", h.GetSaaMeasurementsForInstrument) r.private.PUT("/instruments/saa/:instrument_id/segments", h.UpdateSaaSegments) @@ -336,8 +332,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { // CalculatedTimeseries 
r.public.GET("/formulas", h.GetInstrumentCalculations) r.private.POST("/formulas", h.CreateCalculation) - // TODO: This PUT should really be a PATCH to conform to the REST spec - // Will need to coordinate this with the web client r.private.PUT("/formulas/:formula_id", h.UpdateCalculation) r.private.DELETE("/formulas/:formula_id", h.DeleteCalculation) @@ -347,9 +341,20 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/projects/:project_id/instruments/:instrument_id/timeseries/cwms/:timeseries_id", h.UpdateTimeseriesCwms) // ProcessTimeseries - r.public.GET("/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsByTimeseries) - r.public.GET("/instruments/:instrument_id/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsByTimeseries) + r.public.GET("/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsForTimeseries) + r.public.GET("/instruments/:instrument_id/timeseries/:timeseries_id/measurements", h.ListTimeseriesMeasurementsForTimeseries) // Unit r.public.GET("/units", h.ListUnits) + + // Uploader + r.private.GET("/projects/:project_id/uploader_configs", h.ListUploaderConfigsForProject) + r.private.GET("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.ListUploaderConfigMappings) + r.private.POST("/projects/:project_id/uploader_configs", h.CreateUploaderConfig) + r.private.PUT("/projects/:project_id/uploader_configs/:uploader_config_id", h.UpdateUploaderConfig) + r.private.DELETE("/projects/:project_id/uploader_configs/:uploader_config_id", h.DeleteUploaderConfig) + r.private.POST("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.CreateUploaderConfigMappings) + r.private.PUT("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.UpdateUploaderConfigMappings) + r.private.DELETE("/projects/:project_id/uploader_configs/:uploader_config_id/mappings", h.DeleteAllUploaderConfigMappingsForUploaderConfig) + 
r.private.POST("/projects/:project_id/uploader_configs/:uploader_config_id/uploads", h.UploadFileForUploaderConfig) } diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index e62c791f..216a12b1 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -32,7 +32,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -167,7 +167,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/dto.AlertSubscription" } } }, @@ -180,7 +180,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" }, "type" : "array" } @@ -235,7 +235,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AwarePlatformParameterConfig" + "$ref" : "#/components/schemas/service.AwarePlatformParameterConfig" }, "type" : "array" } @@ -286,7 +286,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AwareParameter" + "$ref" : "#/components/schemas/db.AwareParameterListRow" }, "type" : "array" } @@ -343,7 +343,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/dto.Datalogger" } } }, @@ -355,10 +355,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerWithKey" - }, - "type" : "array" + "$ref" : "#/components/schemas/service.DataloggerWithKey" } } }, @@ -494,7 +491,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" } } }, @@ -559,7 +556,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + 
"$ref" : "#/components/schemas/dto.Datalogger" } } }, @@ -571,7 +568,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" } } }, @@ -639,7 +636,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -651,7 +648,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -720,7 +717,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DataloggerWithKey" + "$ref" : "#/components/schemas/service.DataloggerWithKey" } } }, @@ -874,7 +871,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" }, "type" : "array" } @@ -950,7 +947,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -962,7 +959,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -1037,7 +1034,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/dto.EquivalencyTable" } } }, @@ -1049,7 +1046,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/EquivalencyTable" + "$ref" : "#/components/schemas/db.VDataloggerEquivalencyTable" } } }, @@ -1214,7 +1211,8 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DataloggerTablePreview" + "additionalProperties" : true, + "type" : "object" } } }, @@ -1291,7 +1289,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : 
"#/components/schemas/DataloggerTablePreview" + "$ref" : "#/components/schemas/db.VDataloggerPreview" } } }, @@ -1351,7 +1349,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Datalogger" + "$ref" : "#/components/schemas/db.VDatalogger" }, "type" : "array" } @@ -1405,7 +1403,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/District" + "$ref" : "#/components/schemas/db.VDistrict" }, "type" : "array" } @@ -1456,7 +1454,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Domain" + "$ref" : "#/components/schemas/db.PgTimezoneNamesListRow" }, "type" : "array" } @@ -1495,7 +1493,7 @@ "description" : "Internal Server Error" } }, - "summary" : "lists all domains", + "summary" : "lists time zone options", "tags" : [ "domain" ] } }, @@ -1506,7 +1504,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/DomainMap" + "$ref" : "#/components/schemas/service.DomainMap" } } }, @@ -1564,7 +1562,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/EmailAutocompleteResult" + "$ref" : "#/components/schemas/db.EmailAutocompleteListRow" }, "type" : "array" } @@ -1630,7 +1628,10 @@ "schema" : { "items" : { "additionalProperties" : { - "$ref" : "#/components/schemas/MeasurementCollectionLean" + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" }, "type" : "object" }, @@ -1684,7 +1685,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/CalculatedTimeseries" + "$ref" : "#/components/schemas/db.TimeseriesComputedListForInstrumentRow" }, "type" : "array" } @@ -1736,7 +1737,7 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { @@ -1745,7 +1746,7 @@ } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -1877,7 +1878,7 @@ "application/json" : { "schema" : { 
"items" : { - "$ref" : "#/components/schemas/CalculatedTimeseries" + "$ref" : "#/components/schemas/dto.CalculatedTimeseries" }, "type" : "array" } @@ -1930,11 +1931,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "additionalProperties" : true, - "type" : "object" - }, - "type" : "array" + "$ref" : "#/components/schemas/service.Healthcheck" } } }, @@ -1957,15 +1954,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" } } }, - "description" : "OK" + "description" : "Created" } }, "summary" : "creates a heartbeat entry at regular intervals", @@ -1979,7 +1976,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" } } }, @@ -1998,7 +1995,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Heartbeat" + "$ref" : "#/components/schemas/service.Heartbeat" }, "type" : "array" } @@ -2018,7 +2015,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Home" + "$ref" : "#/components/schemas/db.HomeGetRow" } } }, @@ -2039,75 +2036,6 @@ "tags" : [ "home" ] } }, - "/inclinometer_explorer" : { - "post" : { - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "type" : "string" - }, - "type" : "array" - } - } - }, - "description" : "array of inclinometer instrument uuids", - "required" : true - }, - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "additionalProperties" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollectionLean" - }, - "type" : "object" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - 
"description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "list inclinometer timeseries measurements for explorer page", - "tags" : [ "explorer" ], - "x-codegen-request-body-name" : "instrument_ids" - } - }, "/instrument_groups" : { "get" : { "responses" : { @@ -2116,7 +2044,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrumentGroup" }, "type" : "array" } @@ -2171,7 +2099,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/dto.InstrumentGroup" } } }, @@ -2183,7 +2111,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "items" : { + "$ref" : "#/components/schemas/db.InstrumentGroup" + }, + "type" : "array" } } }, @@ -2252,10 +2183,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/InstrumentGroup" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -2314,7 +2243,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrumentGroup" } } }, @@ -2376,7 +2305,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/dto.InstrumentGroup" } } }, @@ -2388,7 +2317,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.InstrumentGroupUpdateRow" } } }, @@ -2451,7 +2380,7 @@ "application/json" : { 
"schema" : { "items" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" }, "type" : "array" } @@ -2657,7 +2586,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -2717,7 +2646,13 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" + }, + "type" : "object" } } }, @@ -2758,57 +2693,6 @@ "tags" : [ "timeseries" ] } }, - "/instruments" : { - "get" : { - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/Instrument" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists all instruments", - "tags" : [ "instrument" ] - } - }, "/instruments/count" : { "get" : { "responses" : { @@ -2816,7 +2700,8 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentCount" + "additionalProperties" : true, + "type" : "object" } } }, @@ -2857,7 +2742,7 @@ "tags" : [ "instrument" ] } }, - "/instruments/ipi/{instrument_id}/measurements" : { + "/instruments/incl/{instrument_id}/measurements" : { "get" : { "parameters" : [ { "description" : "instrument uuid", @@ -2892,7 +2777,7 @@ 
"application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiMeasurements" + "$ref" : "#/components/schemas/db.VInclMeasurement" }, "type" : "array" } @@ -2932,10 +2817,10 @@ } }, "summary" : "creates instrument notes", - "tags" : [ "instrument-ipi" ] + "tags" : [ "instrument-incl" ] } }, - "/instruments/ipi/{instrument_id}/segments" : { + "/instruments/incl/{instrument_id}/segments" : { "get" : { "parameters" : [ { "description" : "instrument uuid", @@ -2953,7 +2838,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/db.VInclSegment" }, "type" : "array" } @@ -2992,8 +2877,8 @@ "description" : "Internal Server Error" } }, - "summary" : "gets all ipi segments for an instrument", - "tags" : [ "instrument-ipi" ] + "summary" : "gets all incl segments for an instrument", + "tags" : [ "instrument-incl" ] }, "put" : { "parameters" : [ { @@ -3018,13 +2903,13 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/dto.InclSegment" }, "type" : "array" } } }, - "description" : "ipi instrument segments payload", + "description" : "incl instrument segments payload", "required" : true }, "responses" : { @@ -3033,7 +2918,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IpiSegment" + "$ref" : "#/components/schemas/dto.InclSegment" }, "type" : "array" } @@ -3075,20 +2960,47 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "updates multiple segments for an ipi instrument", - "tags" : [ "instrument-ipi" ], + "summary" : "updates multiple segments for an incl instrument", + "tags" : [ "instrument-incl" ], "x-codegen-request-body-name" : "instrument_segments" } }, - "/instruments/notes" : { + "/instruments/ipi/{instrument_id}/measurements" : { "get" : { + "parameters" : [ { + "description" : "instrument uuid", + "in" : "path", + "name" : "instrument_id", + "required" : true, + "schema" 
: { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "after time", + "in" : "query", + "name" : "after", + "schema" : { + "format" : "date-time", + "type" : "string" + } + }, { + "description" : "before time", + "in" : "query", + "name" : "before", + "required" : true, + "schema" : { + "format" : "date-time", + "type" : "string" + } + } ], "responses" : { "200" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.VIpiMeasurement" }, "type" : "array" } @@ -3127,36 +3039,29 @@ "description" : "Internal Server Error" } }, - "summary" : "gets all instrument notes", - "tags" : [ "instrument-note" ] - }, - "post" : { + "summary" : "gets all ipi measurements for an instrument", + "tags" : [ "instrument-ipi" ] + } + }, + "/instruments/ipi/{instrument_id}/segments" : { + "get" : { "parameters" : [ { - "description" : "api key", - "in" : "query", - "name" : "key", + "description" : "instrument uuid", + "in" : "path", + "name" : "instrument_id", + "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/InstrumentNoteCollection" - } - } - }, - "description" : "instrument note collection payload", - "required" : true - }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.VIpiSegment" }, "type" : "array" } @@ -3195,32 +3100,50 @@ "description" : "Internal Server Error" } }, - "summary" : "gets all instrument notes", - "tags" : [ "instrument-note" ] - }, - "post" : { + "summary" : "gets all ipi segments for an instrument", + "tags" : [ "instrument-ipi" ] + }, + "put" : { "parameters" : [ { - "description" : "note 
uuid", + "description" : "instrument uuid", "in" : "path", - "name" : "note_id", + "name" : "instrument_id", "required" : true, "schema" : { "format" : "uuid", "type" : "string" } + }, { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/dto.IpiSegment" + }, + "type" : "array" + } + } + }, + "description" : "ipi instrument segments payload", + "required" : true + }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentNote" + "items" : { + "$ref" : "#/components/schemas/dto.IpiSegment" + }, + "type" : "array" } } }, @@ -3257,12 +3180,148 @@ "description" : "Internal Server Error" } }, - "summary" : "gets a single instrument note by id", - "tags" : [ "instrument-note" ] - }, - "put" : { - "parameters" : [ { - "description" : "note uuid", + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates multiple segments for an ipi instrument", + "tags" : [ "instrument-ipi" ], + "x-codegen-request-body-name" : "instrument_segments" + } + }, + "/instruments/notes" : { + "post" : { + "parameters" : [ { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/dto.InstrumentNoteCollection" + } + } + }, + "description" : "instrument note collection payload", + "required" : true + }, + "responses" : { + "201" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentNote" + }, + "type" : "array" + } + } + }, + "description" : "Created" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + 
"application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates instrument notes", + "tags" : [ "instrument-note" ], + "x-codegen-request-body-name" : "instrument_note" + } + }, + "/instruments/notes/{note_id}" : { + "get" : { + "parameters" : [ { + "description" : "note uuid", + "in" : "path", + "name" : "note_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/db.InstrumentNote" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "gets a single instrument note by id", + "tags" : [ "instrument-note" ] + }, + "put" : { + "parameters" : [ { + "description" : "note uuid", "in" : "path", "name" : "note_id", "required" : true, @@ -3282,7 +3341,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/dto.InstrumentNote" } } }, @@ -3295,7 +3354,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.InstrumentNote" }, "type" : "array" } @@ -3377,7 
+3436,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaMeasurements" + "$ref" : "#/components/schemas/db.VSaaMeasurement" }, "type" : "array" } @@ -3438,7 +3497,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : "#/components/schemas/db.VSaaSegment" }, "type" : "array" } @@ -3503,7 +3562,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : "#/components/schemas/dto.SaaSegment" }, "type" : "array" } @@ -3518,7 +3577,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SaaSegment" + "$ref" : "#/components/schemas/dto.SaaSegment" }, "type" : "array" } @@ -3582,7 +3641,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -3641,7 +3700,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/db.InstrumentNote" }, "type" : "array" } @@ -3780,7 +3839,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentStatus" + "$ref" : "#/components/schemas/db.VInstrumentStatus" }, "type" : "array" } @@ -3844,7 +3903,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentStatusCollection" + "$ref" : "#/components/schemas/dto.InstrumentStatusCollection" } } }, @@ -4005,7 +4064,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VInstrumentStatus" }, "type" : "array" } @@ -4073,7 +4132,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -4142,7 +4201,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : 
"#/components/schemas/db.VTimeseries" } } }, @@ -4232,7 +4291,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollection" + }, + "type" : "array" } } }, @@ -4313,7 +4375,13 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollectionLean" + }, + "type" : "array" + }, + "type" : "object" } } }, @@ -4370,7 +4438,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" }, "type" : "array" } @@ -4433,7 +4501,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertListForProfileRow" }, "type" : "array" } @@ -4500,15 +4568,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertGetRow" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -4573,7 +4641,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.AlertGetRow" } } }, @@ -4624,7 +4692,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Profile" + "$ref" : "#/components/schemas/db.VProfile" } } }, @@ -4684,7 +4752,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" }, "type" : "array" } @@ -4737,7 +4805,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Token" + "$ref" : "#/components/schemas/service.Token" } } }, @@ -4843,57 +4911,6 @@ "tags" : [ "profile" ] } }, - 
"/opendcs/sites" : { - "get" : { - "responses" : { - "200" : { - "content" : { - "text/xml" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/Site" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "text/xml" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists all instruments, represented as opendcs sites", - "tags" : [ "opendcs" ] - } - }, "/profiles" : { "post" : { "responses" : { @@ -4901,7 +4918,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Profile" + "$ref" : "#/components/schemas/db.ProfileCreateRow" } } }, @@ -4961,7 +4978,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" }, "type" : "array" } @@ -5017,7 +5034,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/dto.Project" }, "type" : "array" } @@ -5027,18 +5044,18 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/db.ProjectCreateBatchRow" }, "type" : "array" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -5086,7 +5103,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ProjectCount" + "$ref" : "#/components/schemas/service.ProjectCount" } } }, @@ -5211,7 +5228,7 @@ "content" : { "application/json" : { "schema" : { - 
"$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" } } }, @@ -5273,7 +5290,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/dto.Project" } } }, @@ -5285,7 +5302,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VProject" } } }, @@ -5348,7 +5365,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" }, "type" : "array" } @@ -5412,7 +5429,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/dto.AlertConfig" } } }, @@ -5424,7 +5441,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5502,10 +5519,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfig" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -5573,7 +5588,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5644,7 +5659,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/dto.AlertConfig" } } }, @@ -5656,10 +5671,7 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfig" - }, - "type" : "array" + "$ref" : "#/components/schemas/db.VAlertConfig" } } }, @@ -5722,7 +5734,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.CollectionGroup" }, "type" : "array" } @@ -5787,7 +5799,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : 
"#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/dto.CollectionGroup" } } }, @@ -5800,7 +5812,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/db.CollectionGroup" }, "type" : "array" } @@ -5949,7 +5961,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroupDetails" + "$ref" : "#/components/schemas/db.VCollectionGroupDetails" } } }, @@ -6019,7 +6031,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/dto.CollectionGroup" } } }, @@ -6031,7 +6043,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/CollectionGroup" + "$ref" : "#/components/schemas/db.CollectionGroup" } } }, @@ -6199,7 +6211,7 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { @@ -6208,7 +6220,7 @@ } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -6246,12 +6258,10 @@ } ], "summary" : "adds a timeseries to a collection group", "tags" : [ "collection-groups" ] - } - }, - "/projects/{project_id}/district_rollup/evaluation_submittals" : { - "get" : { + }, + "put" : { "parameters" : [ { - "description" : "project id", + "description" : "project uuid", "in" : "path", "name" : "project_id", "required" : true, @@ -6259,16 +6269,39 @@ "format" : "uuid", "type" : "string" } + }, { + "description" : "collection group uuid", + "in" : "path", + "name" : "collection_group_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "timeseries uuid", + "in" : "path", + "name" : "timeseries_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } } ], "responses" : { "200" : { 
"content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/DistrictRollup" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -6305,11 +6338,14 @@ "description" : "Internal Server Error" } }, - "summary" : "lists monthly evaluation statistics for a district by project id", - "tags" : [ "district-rollup" ] + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates sort order for collection group timesries", + "tags" : [ "collection-groups" ] } }, - "/projects/{project_id}/district_rollup/measurement_submittals" : { + "/projects/{project_id}/district_rollup/evaluation_submittals" : { "get" : { "parameters" : [ { "description" : "project id", @@ -6327,7 +6363,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/DistrictRollup" + "$ref" : "#/components/schemas/db.VDistrictRollup" }, "type" : "array" } @@ -6366,7 +6402,68 @@ "description" : "Internal Server Error" } }, - "summary" : "lists monthly measurement statistics for a district by project id", + "summary" : "lists monthly evaluation statistics for a district by project id", + "tags" : [ "district-rollup" ] + } + }, + "/projects/{project_id}/district_rollup/measurement_submittals" : { + "get" : { + "parameters" : [ { + "description" : "project id", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/db.VDistrictRollup" + }, + "type" : "array" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : 
"Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "lists monthly measurement statistics for a district by project id", "tags" : [ "district-rollup" ] } }, @@ -6388,7 +6485,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" }, "type" : "array" } @@ -6452,7 +6549,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/dto.Evaluation" } } }, @@ -6460,15 +6557,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -6543,7 +6640,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -6613,7 +6711,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, @@ -6684,7 +6782,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/dto.Evaluation" } } }, @@ -6696,7 +6794,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/db.VEvaluation" } } }, @@ -6810,89 +6908,6 @@ "tags" : [ "project" ] } }, - "/projects/{project_id}/inclinometer_measurements" : { - "post" : { - "parameters" : [ { - "description" : "project uuid", - "in" : "path", - "name" : "project_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : 
"api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" - } - } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollectionCollection" - } - } - }, - "description" : "inclinometer measurement collections", - "required" : true - }, - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates or updates one or more inclinometer measurements", - "tags" : [ "measurement-inclinometer" ], - "x-codegen-request-body-name" : "timeseries_measurement_collections" - } - }, "/projects/{project_id}/instrument_groups" : { "get" : { "parameters" : [ { @@ -6911,7 +6926,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/InstrumentGroup" + "$ref" : "#/components/schemas/db.VInstrumentGroup" }, "type" : "array" } @@ -6972,7 +6987,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Project" + "$ref" : "#/components/schemas/db.VInstrument" }, "type" : "array" } @@ -7046,7 +7061,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" }, "type" : "array" } @@ -7056,18 +7071,18 @@ "required" : true }, 
"responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/db.InstrumentCreateBatchRow" }, "type" : "array" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7140,7 +7155,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/ProjectInstrumentAssignments" + "$ref" : "#/components/schemas/dto.ProjectInstrumentAssignments" } } }, @@ -7152,7 +7167,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -7305,7 +7320,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" } } }, @@ -7317,7 +7332,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -7389,7 +7404,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/AlertConfig" + "$ref" : "#/components/schemas/db.VAlertConfig" }, "type" : "array" } @@ -7470,15 +7485,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AlertSubscription" + "$ref" : "#/components/schemas/db.AlertProfileSubscription" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7633,7 +7648,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Alert" + "$ref" : "#/components/schemas/db.VAlert" }, "type" : "array" } @@ -7718,7 +7733,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -7790,15 +7805,15 @@ } } ], "responses" : { - "200" : { + "201" : { 
"content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -7877,7 +7892,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentProjectAssignments" + "$ref" : "#/components/schemas/dto.InstrumentProjectAssignments" } } }, @@ -7889,7 +7904,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InstrumentsValidation" + "$ref" : "#/components/schemas/service.InstrumentsValidation" } } }, @@ -7961,7 +7976,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -8034,7 +8049,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesCollectionItems" + "$ref" : "#/components/schemas/dto.TimeseriesCollectionItems" } } }, @@ -8047,7 +8062,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.TimeseriesCreateBatchRow" }, "type" : "array" } @@ -8208,7 +8223,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Evaluation" + "$ref" : "#/components/schemas/dto.Evaluation" }, "type" : "array" } @@ -8283,7 +8298,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/dto.Instrument" } } }, @@ -8295,7 +8310,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Instrument" + "$ref" : "#/components/schemas/db.VInstrument" } } }, @@ -8367,7 +8382,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -8437,7 +8452,7 @@ "application/json" : { "schema" : { "items" : { 
- "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/db.VTimeseriesCwms" }, "type" : "array" } @@ -8504,7 +8519,7 @@ "*/*" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/dto.TimeseriesCwms" }, "type" : "array" } @@ -8514,18 +8529,16 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -8597,7 +8610,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "$ref" : "#/components/schemas/dto.TimeseriesCwms" } } }, @@ -8610,7 +8623,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/TimeseriesCwms" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -8679,7 +8693,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/ProjectMembership" + "$ref" : "#/components/schemas/db.ProfileProjectRoleListForProjectRow" }, "type" : "array" } @@ -8848,15 +8862,15 @@ } } ], "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ProjectMembership" + "$ref" : "#/components/schemas/db.ProfileProjectRoleGetRow" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -8914,7 +8928,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" }, "type" : "array" } @@ -8980,7 +8994,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlot" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlot" } } }, @@ -8988,15 +9002,15 @@ "required" : true }, "responses" : 
{ - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9069,7 +9083,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlot" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlot" } } }, @@ -9081,7 +9095,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9160,7 +9174,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigMeasurementBullseyePlot" + "$ref" : "#/components/schemas/db.PlotConfigMeasurementListBullseyeRow" }, "type" : "array" } @@ -9229,7 +9243,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigContourPlot" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlot" } } }, @@ -9237,15 +9251,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9318,7 +9332,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigContourPlot" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlot" } } }, @@ -9330,7 +9344,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9416,7 +9430,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/AggregatePlotConfigMeasurementsContourPlot" + "$ref" : "#/components/schemas/service.AggregatePlotConfigMeasurementsContourPlot" } } }, @@ -9577,7 +9591,7 @@ "content" : 
{ "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlot" + "$ref" : "#/components/schemas/dto.PlotConfigProfilePlot" } } }, @@ -9585,15 +9599,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9666,7 +9680,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlot" + "$ref" : "#/components/schemas/dto.PlotConfigProfilePlot" } } }, @@ -9678,7 +9692,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9746,7 +9760,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -9754,15 +9768,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -9835,7 +9849,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -9847,7 +9861,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -9994,7 +10008,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10053,7 +10067,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : 
"#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" }, "type" : "array" } @@ -10117,7 +10131,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -10125,15 +10139,15 @@ "required" : true }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, - "description" : "OK" + "description" : "Created" }, "400" : { "content" : { @@ -10276,7 +10290,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10347,7 +10361,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLinePlot" } } }, @@ -10359,7 +10373,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/PlotConfig" + "$ref" : "#/components/schemas/db.VPlotConfiguration" } } }, @@ -10428,7 +10442,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/db.VReportConfig" } } }, @@ -10493,7 +10507,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/dto.ReportConfig" } } }, @@ -10505,7 +10519,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/db.VReportConfig" } } }, @@ -10658,7 +10672,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfig" + "$ref" : "#/components/schemas/dto.ReportConfig" } } }, @@ -10749,7 +10763,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : 
"#/components/schemas/ReportDownloadJob" + "$ref" : "#/components/schemas/db.ReportDownloadJob" } } }, @@ -10835,7 +10849,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportDownloadJob" + "$ref" : "#/components/schemas/db.ReportDownloadJob" } } }, @@ -10984,7 +10998,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Submittal" + "$ref" : "#/components/schemas/db.VSubmittal" }, "type" : "array" } @@ -11045,7 +11059,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/db.VTimeseries" }, "type" : "array" } @@ -11111,7 +11125,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/dto.TimeseriesMeasurementCollectionCollection" } } }, @@ -11123,10 +11137,8 @@ "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" - }, - "type" : "array" + "additionalProperties" : true, + "type" : "object" } } }, @@ -11208,7 +11220,7 @@ "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/dto.TimeseriesMeasurementCollectionCollection" } } }, @@ -11221,7 +11233,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -11268,95 +11281,79 @@ "x-codegen-request-body-name" : "timeseries_measurement_collections" } }, - "/projects/{project_slug}/images/{uri_path}" : { + "/projects/{project_id}/uploader_configs" : { "get" : { "parameters" : [ { - "description" : "project abbr", - "in" : "path", - "name" : "project_slug", - "required" : true, - "schema" : { - "type" : "string" - } - }, { - "description" : "uri path of requested resource", + "description" 
: "project uuid", "in" : "path", - "name" : "uri_path", + "name" : "project_id", "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], "responses" : { "200" : { - "content" : { }, - "description" : "OK" - }, - "400" : { - "content" : { - "image/jpeg" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { "content" : { - "image/jpeg" : { + "application/json" : { "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" + "items" : { + "$ref" : "#/components/schemas/db.VUploaderConfig" + }, + "type" : "array" } } }, - "description" : "Not Found" + "description" : "OK" }, - "500" : { + "400" : { "content" : { - "image/jpeg" : { + "application/json" : { "schema" : { "$ref" : "#/components/schemas/echo.HTTPError" } } }, - "description" : "Internal Server Error" + "description" : "Bad Request" } }, - "summary" : "serves media, files, etc for a given project", - "tags" : [ "media" ] - } - }, - "/report_configs/{report_config_id}/plot_configs" : { - "get" : { + "summary" : "lists uploader configs for a project", + "tags" : [ "uploader" ] + }, + "post" : { "parameters" : [ { - "description" : "report config uuid", + "description" : "project uuid", "in" : "path", - "name" : "report_config_id", + "name" : "project_id", "required" : true, "schema" : { "format" : "uuid", "type" : "string" } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "required" : true, - "schema" : { - "type" : "string" - } } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/dto.UploaderConfig" + } + } + }, + "description" : "uploader config payload", + "required" : true + }, "responses" : { - "200" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ReportConfigWithPlotConfigs" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "OK" + "description" : 
"Created" }, "400" : { "content" : { @@ -11367,18 +11364,47 @@ } }, "description" : "Bad Request" - }, - "404" : { + } + }, + "summary" : "creates an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config" + } + }, + "/projects/{project_id}/uploader_configs/{uploader_config_id}" : { + "delete" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "Not Found" + "description" : "OK" }, - "500" : { + "400" : { "content" : { "application/json" : { "schema" : { @@ -11386,42 +11412,41 @@ } } }, - "description" : "Internal Server Error" + "description" : "Bad Request" } }, - "summary" : "Lists all plot configs for a report config", - "tags" : [ "report-config" ] - } - }, - "/report_jobs/{job_id}" : { + "summary" : "deletes an uploader config", + "tags" : [ "uploader" ] + }, "put" : { "parameters" : [ { - "description" : "download job uuid", + "description" : "project uuid", "in" : "path", - "name" : "job_id", + "name" : "project_id", "required" : true, "schema" : { "format" : "uuid", "type" : "string" } }, { - "description" : "api key", - "in" : "query", - "name" : "key", + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], "requestBody" : { "content" : { - "application/json" : { + "*/*" : { "schema" : { - "$ref" : "#/components/schemas/ReportDownloadJob" + "$ref" : 
"#/components/schemas/dto.UploaderConfig" } } }, - "description" : "report download job payload", + "description" : "uploader config payload", "required" : true }, "responses" : { @@ -11445,18 +11470,47 @@ } }, "description" : "Bad Request" - }, - "404" : { + } + }, + "summary" : "updates an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config" + } + }, + "/projects/{project_id}/uploader_configs/{uploader_config_id}/mappings" : { + "delete" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "Not Found" + "description" : "OK" }, - "500" : { + "400" : { "content" : { "application/json" : { "schema" : { @@ -11464,29 +11518,29 @@ } } }, - "description" : "Internal Server Error" + "description" : "Bad Request" } }, - "summary" : "updates a job that creates a pdf report", - "tags" : [ "report-config" ], - "x-codegen-request-body-name" : "report_download_job" - } - }, - "/search/{entity}" : { + "summary" : "updates mappings for an uploader config", + "tags" : [ "uploader" ] + }, "get" : { "parameters" : [ { - "description" : "entity to search (i.e. 
projects, etc.)", + "description" : "project uuid", "in" : "path", - "name" : "entity", + "name" : "project_id", "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } }, { - "description" : "search string", - "in" : "query", - "name" : "q", + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], @@ -11496,7 +11550,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/SearchResult" + "$ref" : "#/components/schemas/db.UploaderConfigMapping" }, "type" : "array" } @@ -11513,18 +11567,58 @@ } }, "description" : "Bad Request" + } + }, + "summary" : "lists timeseries mappings for an uploader config", + "tags" : [ "uploader" ] + }, + "post" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/dto.UploaderConfigMapping" + }, + "type" : "array" + } + } }, - "404" : { + "description" : "uploader config mappings payload", + "required" : true + }, + "responses" : { + "201" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" + "additionalProperties" : true, + "type" : "object" } } }, - "description" : "Not Found" + "description" : "Created" }, - "500" : { + "400" : { "content" : { "application/json" : { "schema" : { @@ -11532,32 +11626,47 @@ } } }, - "description" : "Internal Server Error" + "description" : "Bad Request" } }, - "summary" : "allows searching using a string on different entities", - "tags" : [ "search" ] - } - }, - 
"/submittals/{submittal_id}/verify_missing" : { + "summary" : "creates mappings for an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config_mappings" + }, "put" : { "parameters" : [ { - "description" : "submittal uuid", + "description" : "project uuid", "in" : "path", - "name" : "submittal_id", + "name" : "project_id", "required" : true, "schema" : { "format" : "uuid", "type" : "string" } }, { - "description" : "api key", - "in" : "query", - "name" : "key", + "description" : "uploader config uuid", + "in" : "path", + "name" : "uploader_config_id", + "required" : true, "schema" : { + "format" : "uuid", "type" : "string" } } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/dto.UploaderConfigMapping" + }, + "type" : "array" + } + } + }, + "description" : "uploader config mappings payload", + "required" : true + }, "responses" : { "200" : { "content" : { @@ -11579,10 +11688,57 @@ } }, "description" : "Bad Request" + } + }, + "summary" : "updates mappings for an uploader config", + "tags" : [ "uploader" ], + "x-codegen-request-body-name" : "uploader_config_mappings" + } + }, + "/projects/{project_slug}/images/{uri_path}" : { + "get" : { + "parameters" : [ { + "description" : "project abbr", + "in" : "path", + "name" : "project_slug", + "required" : true, + "schema" : { + "type" : "string" + } + }, { + "description" : "uri path of requested resource", + "in" : "path", + "name" : "uri_path", + "required" : true, + "schema" : { + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "image/jpeg" : { + "schema" : { + "format" : "binary", + "type" : "string" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "image/jpeg" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" }, "404" : { "content" : { - "application/json" : { + "image/jpeg" : { "schema" : { "$ref" : 
"#/components/schemas/echo.HTTPError" } @@ -11592,7 +11748,7 @@ }, "500" : { "content" : { - "application/json" : { + "image/jpeg" : { "schema" : { "$ref" : "#/components/schemas/echo.HTTPError" } @@ -11601,46 +11757,36 @@ "description" : "Internal Server Error" } }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "verifies the specified submittal is \"missing\" and will not be completed", - "tags" : [ "submittal" ] + "summary" : "serves media, files, etc for a given project", + "tags" : [ "media" ] } }, - "/timeseries" : { - "post" : { + "/report_configs/{report_config_id}/plot_configs" : { + "get" : { "parameters" : [ { - "description" : "api key", - "in" : "query", + "description" : "report config uuid", + "in" : "path", + "name" : "report_config_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "api key", + "in" : "query", "name" : "key", + "required" : true, "schema" : { "type" : "string" } } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/TimeseriesCollectionItems" - } - } - }, - "description" : "timeseries collection items payload", - "required" : true - }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "items" : { - "additionalProperties" : { - "type" : "string" - }, - "type" : "object" - }, - "type" : "array" + "$ref" : "#/components/schemas/service.ReportConfigWithPlotConfigs" } } }, @@ -11677,20 +11823,16 @@ "description" : "Internal Server Error" } }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates one or more timeseries", - "tags" : [ "timeseries" ], - "x-codegen-request-body-name" : "timeseries_collection_items" + "summary" : "Lists all plot configs for a report config", + "tags" : [ "report-config" ] } }, - "/timeseries/{timeseries_id}" : { - "delete" : { + "/report_jobs/{job_id}" : { + "put" : { "parameters" : [ { - "description" : "timeseries uuid", + "description" : "download job 
uuid", "in" : "path", - "name" : "timeseries_id", + "name" : "job_id", "required" : true, "schema" : { "format" : "uuid", @@ -11700,10 +11842,22 @@ "description" : "api key", "in" : "query", "name" : "key", + "required" : true, "schema" : { "type" : "string" } } ], + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/dto.ReportDownloadJob" + } + } + }, + "description" : "report download job payload", + "required" : true + }, "responses" : { "200" : { "content" : { @@ -11747,20 +11901,26 @@ "description" : "Internal Server Error" } }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "deletes a single timeseries by id", - "tags" : [ "timeseries" ] - }, + "summary" : "updates a job that creates a pdf report", + "tags" : [ "report-config" ], + "x-codegen-request-body-name" : "report_download_job" + } + }, + "/search/projects" : { "get" : { "parameters" : [ { - "description" : "timeseries uuid", + "description" : "entity to search (i.e. 
projects, etc.)", "in" : "path", - "name" : "timeseries_id", + "name" : "entity", "required" : true, "schema" : { - "format" : "uuid", + "type" : "string" + } + }, { + "description" : "search string", + "in" : "query", + "name" : "q", + "schema" : { "type" : "string" } } ], @@ -11769,7 +11929,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "items" : { + "$ref" : "#/components/schemas/db.VProject" + }, + "type" : "array" } } }, @@ -11806,14 +11969,16 @@ "description" : "Internal Server Error" } }, - "summary" : "gets a single timeseries by id", - "tags" : [ "timeseries" ] - }, + "summary" : "allows searching using a string on different entities", + "tags" : [ "search" ] + } + }, + "/submittals/{submittal_id}/verify_missing" : { "put" : { "parameters" : [ { - "description" : "timeseries uuid", + "description" : "submittal uuid", "in" : "path", - "name" : "timeseries_id", + "name" : "submittal_id", "required" : true, "schema" : { "format" : "uuid", @@ -11827,15 +11992,75 @@ "type" : "string" } } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : true, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "verifies the specified submittal is \"missing\" and will not be completed", + "tags" : [ "submittal" ] + } + }, + "/timeseries" : { + "post" : { + "parameters" : [ { + "description" 
: "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } + } ], "requestBody" : { "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/dto.TimeseriesCollectionItems" } } }, - "description" : "timeseries payload", + "description" : "timeseries collection items payload", "required" : true }, "responses" : { @@ -11886,12 +12111,12 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "updates a single timeseries by id", + "summary" : "creates one or more timeseries", "tags" : [ "timeseries" ], - "x-codegen-request-body-name" : "timeseries" + "x-codegen-request-body-name" : "timeseries_collection_items" } }, - "/timeseries/{timeseries_id}/inclinometer_measurements" : { + "/timeseries/{timeseries_id}" : { "delete" : { "parameters" : [ { "description" : "timeseries uuid", @@ -11902,15 +12127,6 @@ "format" : "uuid", "type" : "string" } - }, { - "description" : "timestamp of measurement to delete", - "in" : "query", - "name" : "time", - "required" : true, - "schema" : { - "format" : "date-time", - "type" : "string" - } }, { "description" : "api key", "in" : "query", @@ -11965,8 +12181,8 @@ "security" : [ { "Bearer" : [ ] } ], - "summary" : "deletes a single inclinometer measurement by timestamp", - "tags" : [ "measurement-inclinometer" ] + "summary" : "deletes a single timeseries by id", + "tags" : [ "timeseries" ] }, "get" : { "parameters" : [ { @@ -11978,29 +12194,87 @@ "format" : "uuid", "type" : "string" } - }, { - "description" : "after timestamp", - "in" : "query", - "name" : "after", + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/db.VTimeseries" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" 
: { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "gets a single timeseries by id", + "tags" : [ "timeseries" ] + }, + "put" : { + "parameters" : [ { + "description" : "timeseries uuid", + "in" : "path", + "name" : "timeseries_id", + "required" : true, "schema" : { - "format" : "date-time", + "format" : "uuid", "type" : "string" } }, { - "description" : "before timestamp", + "description" : "api key", "in" : "query", - "name" : "before", + "name" : "key", "schema" : { - "format" : "date-time", "type" : "string" } } ], + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/dto.Timeseries" + } + } + }, + "description" : "timeseries payload", + "required" : true + }, "responses" : { "200" : { "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" + "$ref" : "#/components/schemas/dto.Timeseries" } } }, @@ -12037,8 +12311,12 @@ "description" : "Internal Server Error" } }, - "summary" : "lists all measurements for an inclinometer", - "tags" : [ "measurement-inclinometer" ] + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates a single timeseries by id", + "tags" : [ "timeseries" ], + "x-codegen-request-body-name" : "timeseries" } }, "/timeseries/{timeseries_id}/measurements" : { @@ -12157,7 +12435,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "items" : { + "$ref" : "#/components/schemas/db.MeasurementCollection" + }, + "type" : "array" } } }, @@ -12200,25 +12481,19 @@ }, "/timeseries_measurements" : { "post" : { - "parameters" : [ { - "description" : "api key", - "in" : "query", - "name" : "key", - "required" : true, - "schema" : { - 
"type" : "string" - } - } ], "requestBody" : { "content" : { - "*/*" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" + } + }, + "multipart/form-data" : { "schema" : { - "$ref" : "#/components/schemas/TimeseriesMeasurementCollectionCollection" + "$ref" : "#/components/schemas/_timeseries_measurements_post_request" } } - }, - "description" : "array of timeseries measurement collections", - "required" : true + } }, "responses" : { "200" : { @@ -12226,7 +12501,8 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "additionalProperties" : true, + "type" : "object" }, "type" : "array" } @@ -12265,7 +12541,10 @@ "description" : "Internal Server Error" } }, - "summary" : "creates or updates one or more timeseries measurements", + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates one or more timeseries measurements", "tags" : [ "measurement" ], "x-codegen-request-body-name" : "timeseries_measurement_collections" } @@ -12278,7 +12557,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/Unit" + "$ref" : "#/components/schemas/db.VUnit" }, "type" : "array" } @@ -12312,47 +12591,42 @@ }, "type" : "object" }, - "geojson.Geometry" : { + "db.AlertGetRow" : { "properties" : { - "coordinates" : { - "type" : "object" + "alert_config_id" : { + "type" : "string" }, - "geometries" : { - "items" : { - "$ref" : "#/components/schemas/geojson.Geometry" - }, - "type" : "array" + "body" : { + "type" : "string" }, - "type" : { + "created_at" : { "type" : "string" - } - }, - "type" : "object" - }, - "AggregatePlotConfigMeasurementsContourPlot" : { - "properties" : { - "x" : { - "items" : { - "type" : "number" - }, - "type" : "array" }, - "y" : { - "items" : { - "type" : "number" - }, - "type" : "array" + "id" : { + "type" : "string" }, - "z" : { + "instruments" : { "items" : { - "type" : "number" + "$ref" : 
"#/components/schemas/db.InstrumentIDName" }, "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "read" : { + "type" : "boolean" } }, "type" : "object" }, - "Alert" : { + "db.AlertListForProfileRow" : { "properties" : { "alert_config_id" : { "type" : "string" @@ -12360,7 +12634,7 @@ "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, "id" : { @@ -12368,7 +12642,7 @@ }, "instruments" : { "items" : { - "$ref" : "#/components/schemas/AlertConfigInstrument" + "$ref" : "#/components/schemas/db.InstrumentIDName" }, "type" : "array" }, @@ -12387,275 +12661,1908 @@ }, "type" : "object" }, - "AlertConfig" : { + "db.AlertProfileSubscription" : { "properties" : { - "alert_email_subscriptions" : { - "items" : { - "$ref" : "#/components/schemas/EmailAutocompleteResult" - }, - "type" : "array" + "alert_config_id" : { + "type" : "string" }, - "alert_type" : { + "id" : { "type" : "string" }, - "alert_type_id" : { + "mute_notify" : { + "type" : "boolean" + }, + "mute_ui" : { + "type" : "boolean" + }, + "profile_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.AwareParameterListRow" : { + "properties" : { + "id" : { "type" : "string" }, - "body" : { + "key" : { "type" : "string" }, - "create_date" : { + "parameter_id" : { "type" : "string" }, - "creator_id" : { + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.CollectionGroup" : { + "properties" : { + "created_at" : { "type" : "string" }, - "creator_username" : { + "created_by" : { "type" : "string" }, "id" : { "type" : "string" }, - "instruments" : { - "items" : { - "$ref" : "#/components/schemas/AlertConfigInstrument" - }, - "type" : "array" + "name" : { + "type" : "string" }, - "last_checked" : { + "project_id" : { "type" : "string" }, - "last_reminded" : { + "slug" : { "type" : "string" }, - "mute_consecutive_alerts" : { + "sort_order" : { + 
"type" : "integer" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.CollectionGroupDetailsTimeseries" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "instrument_slug" : { + "type" : "string" + }, + "is_computed" : { "type" : "boolean" }, + "latest_time" : { + "type" : "string" + }, + "latest_value" : { + "type" : "number" + }, "name" : { "type" : "string" }, - "project_id" : { + "parameter" : { "type" : "string" }, - "project_name" : { + "parameter_id" : { "type" : "string" }, - "remind_interval" : { + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + }, + "variable" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.DataloggerEquivalencyTableField" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "field_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.DataloggerTableIDName" : { + "properties" : { + "id" : { + "type" : "string" + }, + "table_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.DomainGroupOpt" : { + "properties" : { + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "value" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.EmailAutocompleteListRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "user_type" : { + "type" : "string" + }, + "username" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.EmailAutocompleteResult" : { + "properties" : { + "email" : { + "type" : "string" + }, + 
"id" : { + "type" : "string" + }, + "user_type" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.HomeGetRow" : { + "properties" : { + "instrument_count" : { + "type" : "integer" + }, + "instrument_group_count" : { + "type" : "integer" + }, + "new_instruments_7d" : { + "type" : "integer" + }, + "new_measurements_2h" : { + "type" : "integer" + }, + "project_count" : { + "type" : "integer" + } + }, + "type" : "object" + }, + "db.IDSlugName" : { + "properties" : { + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentGroup" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "deleted" : { + "type" : "boolean" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentGroupUpdateRow" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentIDName" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "instrument_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.InstrumentNote" : { + "properties" : { + 
"body" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "time" : { + "type" : "string" + }, + "title" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.IpiMeasurement" : { + "properties" : { + "cum_dev" : { + "type" : "number" + }, + "elevation" : { + "type" : "number" + }, + "inc_dev" : { + "type" : "number" + }, + "segment_id" : { + "type" : "integer" + }, + "temp" : { + "type" : "number" + }, + "tilt" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.JobStatus" : { + "enum" : [ "SUCCESS", "FAIL", "INIT" ], + "type" : "string", + "x-enum-varnames" : [ "JobStatusSUCCESS", "JobStatusFAIL", "JobStatusINIT" ] + }, + "db.Measurement" : { + "properties" : { + "annotation" : { + "type" : "string" + }, + "error" : { + "type" : "string" + }, + "masked" : { + "type" : "boolean" + }, + "time" : { + "type" : "string" + }, + "validated" : { + "type" : "boolean" + }, + "value" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.MeasurementCollection" : { + "properties" : { + "items" : { + "items" : { + "$ref" : "#/components/schemas/db.Measurement" + }, + "type" : "array" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.MeasurementCollectionLean" : { + "properties" : { + "items" : { + "items" : { + "$ref" : "#/components/schemas/db.MeasurementLean" + }, + "type" : "array" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.MeasurementLean" : { + "additionalProperties" : { + "type" : "number" + }, + "type" : "object" + }, + "db.PgTimezoneNamesListRow" : { + "properties" : { + "abbrev" : { + "type" : "string" + }, + "is_dst" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "utc_offset" : { + "type" : "string" + 
} + }, + "type" : "object" + }, + "db.PlotConfigMeasurementListBullseyeRow" : { + "properties" : { + "time" : { + "type" : "string" + }, + "x" : { + "type" : "object" + }, + "y" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.PlotType" : { + "enum" : [ "scatter-line", "profile", "contour", "bullseye" ], + "type" : "string", + "x-enum-varnames" : [ "PlotTypeScatterLine", "PlotTypeProfile", "PlotTypeContour", "PlotTypeBullseye" ] + }, + "db.ProfileCreateRow" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ProfileProjectRoleGetRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "role" : { + "type" : "string" + }, + "role_id" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ProfileProjectRoleListForProjectRow" : { + "properties" : { + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "role" : { + "type" : "string" + }, + "role_id" : { + "type" : "string" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ProjectCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.ReportConfigGlobalOverrides" : { + "properties" : { + "date_range" : { + "$ref" : "#/components/schemas/db.TextOption" + }, + "show_masked" : { + "$ref" : "#/components/schemas/db.ToggleOption" + }, + "show_nonvalidated" : { + "$ref" : "#/components/schemas/db.ToggleOption" + } + }, + "type" : "object" + }, + "db.ReportDownloadJob" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "file_expiry" : { + "type" : "string" + 
}, + "file_key" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "progress" : { + "type" : "integer" + }, + "progress_updated_at" : { + "type" : "string" + }, + "report_config_id" : { + "type" : "string" + }, + "status" : { + "$ref" : "#/components/schemas/db.JobStatus" + } + }, + "type" : "object" + }, + "db.SaaMeasurement" : { + "properties" : { + "elevation" : { + "type" : "number" + }, + "segment_id" : { + "type" : "integer" + }, + "temp" : { + "type" : "number" + }, + "temp_cum_dev" : { + "type" : "number" + }, + "temp_increment" : { + "type" : "number" + }, + "x" : { + "type" : "number" + }, + "x_cum_dev" : { + "type" : "number" + }, + "x_increment" : { + "type" : "number" + }, + "y" : { + "type" : "number" + }, + "y_cum_dev" : { + "type" : "number" + }, + "y_increment" : { + "type" : "number" + }, + "z" : { + "type" : "number" + }, + "z_cum_dev" : { + "type" : "number" + }, + "z_increment" : { + "type" : "number" + } + }, + "type" : "object" + }, + "db.TextOption" : { + "properties" : { + "enabled" : { + "type" : "boolean" + }, + "value" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesComputedListForInstrumentRow" : { + "properties" : { + "formula" : { + "type" : "string" + }, + "formula_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesCreateBatchRow" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.TimeseriesType" : { + "enum" : [ "standard", "constant", 
"computed", "cwms" ], + "type" : "string", + "x-enum-varnames" : [ "TimeseriesTypeStandard", "TimeseriesTypeConstant", "TimeseriesTypeComputed", "TimeseriesTypeCwms" ] + }, + "db.ToggleOption" : { + "properties" : { + "enabled" : { + "type" : "boolean" + }, + "value" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "db.UploaderConfigMapping" : { + "properties" : { + "field_name" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + }, + "uploader_config_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.UploaderConfigType" : { + "enum" : [ "csv", "dux", "toa5" ], + "type" : "string", + "x-enum-varnames" : [ "UploaderConfigTypeCsv", "UploaderConfigTypeDux", "UploaderConfigTypeToa5" ] + }, + "db.VAlert" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VAlertConfig" : { + "properties" : { + "alert_email_subscriptions" : { + "items" : { + "$ref" : "#/components/schemas/db.EmailAutocompleteResult" + }, + "type" : "array" + }, + "alert_type" : { + "type" : "string" + }, + "alert_type_id" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "create_next_submittal_from" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "last_checked_at" : { + "type" : "string" + }, + "last_reminded_at" : { + "type" : "string" + }, + 
"mute_consecutive_alerts" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "remind_interval" : { + "type" : "string" + }, + "schedule_interval" : { + "type" : "string" + }, + "started_at" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "warning_interval" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VCollectionGroupDetails" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "timeseries" : { + "items" : { + "$ref" : "#/components/schemas/db.CollectionGroupDetailsTimeseries" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDatalogger" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "id" : { + "type" : "string" + }, + "model" : { + "type" : "string" + }, + "model_id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "sn" : { + "type" : "string" + }, + "tables" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerTableIDName" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDataloggerEquivalencyTable" : { 
+ "properties" : { + "datalogger_id" : { + "type" : "string" + }, + "datalogger_table_id" : { + "type" : "string" + }, + "datalogger_table_name" : { + "type" : "string" + }, + "fields" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerEquivalencyTableField" + }, + "type" : "array" + } + }, + "type" : "object" + }, + "db.VDataloggerPreview" : { + "properties" : { + "datalogger_table_id" : { + "type" : "string" + }, + "preview" : { + "items" : { + "type" : "integer" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDistrict" : { + "properties" : { + "agency" : { + "type" : "string" + }, + "division_initials" : { + "type" : "string" + }, + "division_name" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "initials" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VDistrictRollup" : { + "properties" : { + "actual_total_submittals" : { + "type" : "integer" + }, + "alert_type_id" : { + "type" : "string" + }, + "district_initials" : { + "type" : "string" + }, + "expected_total_submittals" : { + "type" : "integer" + }, + "green_submittals" : { + "type" : "integer" + }, + "month" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "red_submittals" : { + "type" : "integer" + }, + "yellow_submittals" : { + "type" : "integer" + } + }, + "type" : "object" + }, + "db.VDomain" : { + "properties" : { + "description" : { + "type" : "string" + }, + "group" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "value" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VEvaluation" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "alert_config_name" : { + "type" : "string" + }, + "body" : { + "type" : "string" + }, + "created_at" : { + "type" 
: "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "ended_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instruments" : { + "items" : { + "$ref" : "#/components/schemas/db.InstrumentIDName" + }, + "type" : "array" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "started_at" : { + "type" : "string" + }, + "submittal_id" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInclMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "type" : "object" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInclSegment" : { + "properties" : { + "a0_timeseries_id" : { + "type" : "string" + }, + "a180_timeseries_id" : { + "type" : "string" + }, + "b0_timeseries_id" : { + "type" : "string" + }, + "b180_timeseries_id" : { + "type" : "string" + }, + "depth_timeseries_id" : { + "type" : "string" + }, + "id" : { + "type" : "integer" + }, + "instrument_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrument" : { + "properties" : { + "alert_configs" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "constants" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "geometry" : { + "items" : { + "type" : "integer" + }, + "type" : "array" + }, + "groups" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "has_cwms" : { + "type" : "boolean" + }, + "icon" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "nid_id" : { + "type" : "string" + }, + "offset" : { + "type" : 
"integer" + }, + "opts" : { + "type" : "object" + }, + "projects" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "show_cwms_tab" : { + "type" : "boolean" + }, + "slug" : { + "type" : "string" + }, + "station" : { + "type" : "integer" + }, + "status" : { + "type" : "string" + }, + "status_id" : { + "type" : "string" + }, + "status_time" : { + "type" : "string" + }, + "telemetry" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "type" : { + "type" : "string" + }, + "type_id" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "usgs_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrumentGroup" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "instrument_count" : { + "type" : "integer" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "timeseries_count" : { + "type" : "object" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VInstrumentStatus" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "status" : { + "type" : "string" + }, + "status_id" : { + "type" : "string" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VIpiMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "items" : { + "$ref" : "#/components/schemas/db.IpiMeasurement" + }, + "type" : "array" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VIpiSegment" : { + "properties" : { + "id" : { + "type" : "integer" + }, + "inc_dev_timeseries_id" : { + "type" : 
"string" + }, + "instrument_id" : { + "type" : "string" + }, + "length" : { + "type" : "number" + }, + "length_timeseries_id" : { + "type" : "string" + }, + "tilt_timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VPlotConfiguration" : { + "properties" : { + "auto_range" : { + "type" : "boolean" + }, + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "date_range" : { + "type" : "string" + }, + "display" : { + "type" : "object" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "plot_type" : { + "$ref" : "#/components/schemas/db.PlotType" + }, + "project_id" : { + "type" : "string" + }, + "report_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "show_comments" : { + "type" : "boolean" + }, + "show_masked" : { + "type" : "boolean" + }, + "show_nonvalidated" : { + "type" : "boolean" + }, + "slug" : { + "type" : "string" + }, + "threshold" : { + "type" : "integer" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProfile" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "edipi" : { + "type" : "integer" + }, + "email" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "is_admin" : { + "type" : "boolean" + }, + "roles" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "tokens" : { + "items" : { + "$ref" : "#/components/schemas/db.VProfileToken" + }, + "type" : "array" + }, + "username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProfileToken" : { + "properties" : { + "issued" : { + "type" : "string" + }, + "token_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VProject" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "district_id" 
: { + "type" : "string" + }, + "federal_id" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "image" : { + "type" : "object" + }, + "instrument_count" : { + "type" : "integer" + }, + "instrument_group_count" : { + "type" : "integer" + }, + "name" : { + "type" : "string" + }, + "office_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VReportConfig" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "district_name" : { + "type" : "string" + }, + "global_overrides" : { + "$ref" : "#/components/schemas/db.ReportConfigGlobalOverrides" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "plot_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.IDSlugName" + }, + "type" : "array" + }, + "project_id" : { + "type" : "string" + }, + "project_name" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSaaMeasurement" : { + "properties" : { + "instrument_id" : { + "type" : "string" + }, + "measurements" : { + "items" : { + "$ref" : "#/components/schemas/db.SaaMeasurement" + }, + "type" : "array" + }, + "time" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSaaSegment" : { + "properties" : { + "id" : { + "type" : "integer" + }, + "instrument_id" : { + "type" : "string" + }, + "length" : { + "type" : "number" + }, + "length_timeseries_id" : { + "type" : "string" + }, + "temp_timeseries_id" : { + "type" : "string" + }, + "x_timeseries_id" : { + "type" : "string" + }, + 
"y_timeseries_id" : { + "type" : "string" + }, + "z_timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "db.VSubmittal" : { + "properties" : { + "alert_config_id" : { + "type" : "string" + }, + "alert_config_name" : { + "type" : "string" + }, + "alert_type_id" : { + "type" : "string" + }, + "alert_type_name" : { + "type" : "string" + }, + "completed_at" : { + "type" : "string" + }, + "created_at" : { + "type" : "string" + }, + "due_at" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "marked_as_missing" : { + "type" : "boolean" + }, + "project_id" : { + "type" : "string" + }, + "submittal_status_id" : { + "type" : "string" + }, + "submittal_status_name" : { + "type" : "string" + }, + "warning_sent" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "db.VTimeseries" : { + "properties" : { + "id" : { + "type" : "string" + }, + "instrument" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "instrument_slug" : { + "type" : "string" + }, + "is_computed" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "parameter" : { + "type" : "string" + }, + "parameter_id" : { + "type" : "string" + }, + "slug" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { + "type" : "string" + }, + "unit_id" : { + "type" : "string" + }, + "variable" : { + "type" : "object" + } + }, + "type" : "object" + }, + "db.VTimeseriesCwms" : { + "properties" : { + "cwms_extent_earliest_time" : { "type" : "string" }, - "schedule_interval" : { + "cwms_extent_latest_time" : { "type" : "string" }, - "start_date" : { + "cwms_office_id" : { "type" : "string" }, - "update_date" : { + "cwms_timeseries_id" : { "type" : "string" }, - "updater_id" : { + "id" : { "type" : "string" }, - "updater_username" : { + "instrument" : { "type" : "string" }, - "warning_interval" : { - "type" : "string" - } - }, - "type" : "object" - }, - "AlertConfigInstrument" : { - 
"properties" : { "instrument_id" : { "type" : "string" }, - "instrument_name" : { - "type" : "string" - } - }, - "type" : "object" - }, - "AlertSubscription" : { - "properties" : { - "alert_config_id" : { - "type" : "string" - }, - "id" : { + "instrument_slug" : { "type" : "string" }, - "mute_notify" : { + "is_computed" : { "type" : "boolean" }, - "mute_ui" : { - "type" : "boolean" + "name" : { + "type" : "string" }, - "profile_id" : { + "parameter" : { "type" : "string" - } - }, - "type" : "object" - }, - "AwareParameter" : { - "properties" : { - "id" : { + }, + "parameter_id" : { "type" : "string" }, - "key" : { + "slug" : { "type" : "string" }, - "parameter_id" : { + "type" : { + "$ref" : "#/components/schemas/db.TimeseriesType" + }, + "unit" : { "type" : "string" }, "unit_id" : { "type" : "string" - } - }, - "type" : "object" - }, - "AwarePlatformParameterConfig" : { - "properties" : { - "aware_id" : { - "type" : "string" }, - "aware_parameters" : { - "additionalProperties" : { - "type" : "string" - }, + "variable" : { "type" : "object" - }, - "instrument_id" : { - "type" : "string" } }, "type" : "object" }, - "CalculatedTimeseries" : { + "db.VUnit" : { "properties" : { - "formula" : { + "abbreviation" : { "type" : "string" }, - "formula_name" : { + "id" : { "type" : "string" }, - "id" : { + "measure" : { "type" : "string" }, - "instrument_id" : { + "measure_id" : { "type" : "string" }, - "parameter_id" : { + "name" : { "type" : "string" }, - "slug" : { + "unit_family" : { "type" : "string" }, - "unit_id" : { + "unit_family_id" : { "type" : "string" } }, "type" : "object" }, - "CollectionGroup" : { + "db.VUploaderConfig" : { "properties" : { - "create_date" : { - "type" : "string" + "column_offset" : { + "type" : "integer" }, - "creator_id" : { + "comment_field" : { "type" : "string" }, - "creator_username" : { + "comment_field_enabled" : { + "type" : "boolean" + }, + "created_at" : { "type" : "string" }, - "id" : { + "created_by" : { "type" : "string" }, - 
"name" : { + "created_by_username" : { "type" : "string" }, - "project_id" : { + "description" : { "type" : "string" }, - "slug" : { + "id" : { "type" : "string" }, - "update_date" : { + "masked_field" : { "type" : "string" }, - "updater_id" : { + "masked_field_enabled" : { + "type" : "boolean" + }, + "name" : { "type" : "string" }, - "updater_username" : { + "project_id" : { "type" : "string" - } - }, - "type" : "object" - }, - "CollectionGroupDetails" : { - "properties" : { - "create_date" : { + }, + "row_offset" : { + "type" : "integer" + }, + "slug" : { "type" : "string" }, - "creator_id" : { + "time_field" : { "type" : "string" }, - "creator_username" : { + "type" : { + "$ref" : "#/components/schemas/db.UploaderConfigType" + }, + "tz_name" : { "type" : "string" }, - "id" : { + "updated_at" : { "type" : "string" }, - "name" : { + "updated_by" : { "type" : "string" }, - "project_id" : { + "updated_by_username" : { "type" : "string" }, - "slug" : { + "validated_field" : { "type" : "string" }, - "timeseries" : { + "validated_field_enabled" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "dto.AlertConfig" : { + "properties" : { + "alert_email_subscriptions" : { "items" : { - "$ref" : "#/components/schemas/collectionGroupDetailsTimeseries" + "$ref" : "#/components/schemas/dto.EmailAutocompleteResult" }, "type" : "array" }, - "update_date" : { + "alert_type" : { "type" : "string" }, - "updater_id" : { + "alert_type_id" : { "type" : "string" }, - "updater_username" : { + "body" : { "type" : "string" - } - }, - "type" : "object" - }, - "Datalogger" : { - "properties" : { - "create_date" : { + }, + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, - "errors" : { + "id" : { + "type" : "string" + }, + "instruments" : { "items" : { - "type" : "string" + "$ref" : "#/components/schemas/dto.AlertConfigInstrument" }, "type" : "array" }, - "id" : 
{ + "last_checked" : { "type" : "string" }, - "model" : { + "last_reminded" : { "type" : "string" }, - "model_id" : { - "type" : "string" + "mute_consecutive_alerts" : { + "type" : "boolean" }, "name" : { "type" : "string" @@ -12663,219 +14570,196 @@ "project_id" : { "type" : "string" }, - "slug" : { + "project_name" : { "type" : "string" }, - "sn" : { + "remind_interval" : { "type" : "string" }, - "tables" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerTable" - }, - "type" : "array" + "schedule_interval" : { + "type" : "string" }, - "update_date" : { + "started_at" : { "type" : "string" }, - "updater_id" : { + "updated_by" : { "type" : "string" }, - "updater_username" : { + "updated_by_username" : { "type" : "string" - } - }, - "type" : "object" - }, - "DataloggerTable" : { - "properties" : { - "id" : { + }, + "updatedd_at" : { "type" : "string" }, - "table_name" : { + "warning_interval" : { "type" : "string" } }, "type" : "object" }, - "DataloggerTablePreview" : { + "dto.AlertConfigInstrument" : { "properties" : { - "datalogger_table_id" : { + "instrument_id" : { "type" : "string" }, - "preview" : { - "$ref" : "#/components/schemas/pgtype.JSON" - }, - "update_date" : { + "instrument_name" : { "type" : "string" } }, "type" : "object" }, - "DataloggerWithKey" : { + "dto.AlertSubscription" : { "properties" : { - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { + "alert_config_id" : { "type" : "string" }, - "errors" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, "id" : { "type" : "string" }, - "key" : { - "type" : "string" + "mute_notify" : { + "type" : "boolean" }, - "model" : { - "type" : "string" + "mute_ui" : { + "type" : "boolean" }, - "model_id" : { + "profile_id" : { "type" : "string" - }, - "name" : { + } + }, + "type" : "object" + }, + "dto.CalculatedTimeseries" : { + "properties" : { + "formula" : { "type" : "string" }, - "project_id" : { + "formula_name" : { 
"type" : "string" }, - "slug" : { + "id" : { "type" : "string" }, - "sn" : { + "instrument_id" : { "type" : "string" }, - "tables" : { - "items" : { - "$ref" : "#/components/schemas/DataloggerTable" - }, - "type" : "array" - }, - "update_date" : { + "parameter_id" : { "type" : "string" }, - "updater_id" : { + "slug" : { "type" : "string" }, - "updater_username" : { + "unit_id" : { "type" : "string" } }, "type" : "object" }, - "District" : { + "dto.CollectionGroup" : { "properties" : { - "agency" : { + "created_at" : { "type" : "string" }, - "division_initials" : { + "created_by" : { "type" : "string" }, - "division_name" : { + "created_by_username" : { "type" : "string" }, "id" : { "type" : "string" }, - "initials" : { + "name" : { "type" : "string" }, - "name" : { + "project_id" : { "type" : "string" }, - "office_id" : { + "slug" : { + "type" : "string" + }, + "sort_order" : { + "type" : "integer" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "DistrictRollup" : { + "dto.Datalogger" : { "properties" : { - "actual_total_submittals" : { - "type" : "integer" - }, - "alert_type_id" : { + "created_at" : { "type" : "string" }, - "district_initials" : { + "created_by" : { "type" : "string" }, - "expected_total_submittals" : { - "type" : "integer" + "created_by_username" : { + "type" : "string" }, - "green_submittals" : { - "type" : "integer" + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" }, - "month" : { + "id" : { "type" : "string" }, - "office_id" : { + "model" : { "type" : "string" }, - "project_id" : { + "model_id" : { "type" : "string" }, - "project_name" : { + "name" : { "type" : "string" }, - "red_submittals" : { - "type" : "integer" + "project_id" : { + "type" : "string" }, - "yellow_submittals" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "Domain" : { - "properties" : { - "description" : { + "slug" : { 
"type" : "string" }, - "group" : { + "sn" : { "type" : "string" }, - "id" : { + "tables" : { + "items" : { + "$ref" : "#/components/schemas/dto.DataloggerTable" + }, + "type" : "array" + }, + "updated_by" : { "type" : "string" }, - "value" : { + "updated_by_username" : { + "type" : "string" + }, + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "DomainGroupOption" : { + "dto.DataloggerTable" : { "properties" : { - "description" : { - "type" : "string" - }, "id" : { "type" : "string" }, - "value" : { + "table_name" : { "type" : "string" } }, "type" : "object" }, - "DomainMap" : { - "additionalProperties" : { - "items" : { - "$ref" : "#/components/schemas/DomainGroupOption" - }, - "type" : "array" - }, - "type" : "object" - }, - "EmailAutocompleteResult" : { + "dto.EmailAutocompleteResult" : { "properties" : { "email" : { "type" : "string" @@ -12892,7 +14776,7 @@ }, "type" : "object" }, - "EquivalencyTable" : { + "dto.EquivalencyTable" : { "properties" : { "datalogger_id" : { "type" : "string" @@ -12905,14 +14789,14 @@ }, "rows" : { "items" : { - "$ref" : "#/components/schemas/EquivalencyTableRow" + "$ref" : "#/components/schemas/dto.EquivalencyTableRow" }, "type" : "array" } }, "type" : "object" }, - "EquivalencyTableRow" : { + "dto.EquivalencyTableRow" : { "properties" : { "display_name" : { "type" : "string" @@ -12932,7 +14816,7 @@ }, "type" : "object" }, - "Evaluation" : { + "dto.Evaluation" : { "properties" : { "alert_config_id" : { "type" : "string" @@ -12943,16 +14827,16 @@ "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, - "end_date" : { + "ended_at" : { "type" : "string" }, "id" : { @@ -12960,7 +14844,7 @@ }, "instruments" : { "items" : { - "$ref" : "#/components/schemas/EvaluationInstrument" + "$ref" : "#/components/schemas/dto.EvaluationInstrument" }, "type" : "array" 
}, @@ -12973,25 +14857,25 @@ "project_name" : { "type" : "string" }, - "start_date" : { + "started_at" : { "type" : "string" }, "submittal_id" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "EvaluationInstrument" : { + "dto.EvaluationInstrument" : { "properties" : { "instrument_id" : { "type" : "string" @@ -13002,52 +14886,7 @@ }, "type" : "object" }, - "Geometry" : { - "properties" : { - "coordinates" : { - "type" : "object" - }, - "geometries" : { - "items" : { - "$ref" : "#/components/schemas/geojson.Geometry" - }, - "type" : "array" - }, - "type" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Heartbeat" : { - "properties" : { - "time" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Home" : { - "properties" : { - "instrument_count" : { - "type" : "integer" - }, - "instrument_group_count" : { - "type" : "integer" - }, - "new_instruments_7d" : { - "type" : "integer" - }, - "new_measurements_2h" : { - "type" : "integer" - }, - "project_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "IDSlugName" : { + "dto.IDSlugName" : { "properties" : { "id" : { "type" : "string" @@ -13061,75 +14900,33 @@ }, "type" : "object" }, - "InclinometerMeasurement" : { + "dto.InclSegment" : { "properties" : { - "create_date" : { + "a0_timeseries_id" : { "type" : "string" }, - "creator" : { + "a180_timeseries_id" : { "type" : "string" }, - "time" : { + "b0_timeseries_id" : { "type" : "string" }, - "values" : { - "items" : { - "type" : "integer" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InclinometerMeasurementCollection" : { - "properties" : { - "inclinometers" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurement" - }, - "type" : "array" - }, - "timeseries_id" : { + "b180_timeseries_id" : { "type" : "string" - 
} - }, - "type" : "object" - }, - "InclinometerMeasurementCollectionCollection" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurementCollection" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InclinometerMeasurementCollectionLean" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/InclinometerMeasurementLean" - }, - "type" : "array" }, - "timeseries_id" : { + "depth_timeseries_id" : { "type" : "string" - } - }, - "type" : "object" - }, - "InclinometerMeasurementLean" : { - "additionalProperties" : { - "items" : { + }, + "id" : { "type" : "integer" }, - "type" : "array" + "instrument_id" : { + "type" : "string" + } }, "type" : "object" }, - "Instrument" : { + "dto.Instrument" : { "properties" : { "alert_configs" : { "items" : { @@ -13146,17 +14943,20 @@ }, "type" : "array" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "geometry" : { - "$ref" : "#/components/schemas/Geometry" + "items" : { + "type" : "integer" + }, + "type" : "array" }, "groups" : { "items" : { @@ -13183,11 +14983,11 @@ "type" : "integer" }, "opts" : { - "$ref" : "#/components/schemas/Opts" + "$ref" : "#/components/schemas/dto.Opts" }, "projects" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13215,13 +15015,13 @@ "type_id" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" }, "usgs_id" : { @@ -13230,23 +15030,15 @@ }, "type" : "object" }, - "InstrumentCount" : { - "properties" : { - "instrument_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "InstrumentGroup" : { + 
"dto.InstrumentGroup" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "description" : { @@ -13270,30 +15062,30 @@ "timeseries_count" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "InstrumentNote" : { + "dto.InstrumentNote" : { "properties" : { "body" : { "type" : "string" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "id" : { @@ -13308,30 +15100,30 @@ "title" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "InstrumentNoteCollection" : { + "dto.InstrumentNoteCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/InstrumentNote" + "$ref" : "#/components/schemas/dto.InstrumentNote" }, "type" : "array" } }, "type" : "object" }, - "InstrumentProjectAssignments" : { + "dto.InstrumentProjectAssignments" : { "properties" : { "project_ids" : { "items" : { @@ -13342,7 +15134,7 @@ }, "type" : "object" }, - "InstrumentStatus" : { + "dto.InstrumentStatus" : { "properties" : { "id" : { "type" : "string" @@ -13359,46 +15151,18 @@ }, "type" : "object" }, - "InstrumentStatusCollection" : { + "dto.InstrumentStatusCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/InstrumentStatus" - }, - "type" : "array" - } - }, - "type" : "object" - }, - "InstrumentsValidation" : { - "properties" : { - "errors" : { - "items" : 
{ - "type" : "string" - }, - "type" : "array" - }, - "is_valid" : { - "type" : "boolean" - } - }, - "type" : "object" - }, - "IpiMeasurements" : { - "properties" : { - "measurements" : { - "items" : { - "$ref" : "#/components/schemas/IpiSegmentMeasurement" + "$ref" : "#/components/schemas/dto.InstrumentStatus" }, "type" : "array" - }, - "time" : { - "type" : "string" } }, "type" : "object" }, - "IpiSegment" : { + "dto.IpiSegment" : { "properties" : { "id" : { "type" : "integer" @@ -13424,30 +15188,7 @@ }, "type" : "object" }, - "IpiSegmentMeasurement" : { - "properties" : { - "cum_dev" : { - "type" : "number" - }, - "elevation" : { - "type" : "number" - }, - "inc_dev" : { - "type" : "number" - }, - "segment_id" : { - "type" : "integer" - }, - "temp" : { - "type" : "number" - }, - "tilt" : { - "type" : "number" - } - }, - "type" : "object" - }, - "Measurement" : { + "dto.Measurement" : { "properties" : { "annotation" : { "type" : "string" @@ -13461,137 +15202,52 @@ "time" : { "type" : "string" }, - "validated" : { - "type" : "boolean" - }, - "value" : { - "type" : "number" - } - }, - "type" : "object" - }, - "MeasurementCollection" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/Measurement" - }, - "type" : "array" - }, - "timeseries_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "MeasurementCollectionLean" : { - "properties" : { - "items" : { - "items" : { - "$ref" : "#/components/schemas/MeasurementLean" - }, - "type" : "array" - }, - "timeseries_id" : { - "type" : "string" - } - }, - "type" : "object" - }, - "MeasurementLean" : { - "additionalProperties" : { - "type" : "number" - }, - "type" : "object" - }, - "Opts" : { - "additionalProperties" : true, - "type" : "object" - }, - "PlotConfig" : { - "properties" : { - "auto_range" : { - "type" : "boolean" - }, - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "date_range" : 
{ - "type" : "string" - }, - "display" : { - "$ref" : "#/components/schemas/Opts" - }, - "id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "plot_type" : { - "type" : "string" - }, - "project_id" : { - "type" : "string" - }, - "report_configs" : { - "items" : { - "$ref" : "#/components/schemas/IDSlugName" - }, - "type" : "array" - }, - "show_comments" : { - "type" : "boolean" - }, - "show_masked" : { - "type" : "boolean" - }, - "show_nonvalidated" : { - "type" : "boolean" - }, - "slug" : { - "type" : "string" - }, - "threshold" : { - "type" : "integer" - }, - "update_date" : { - "type" : "string" + "validated" : { + "type" : "boolean" }, - "updater_id" : { - "type" : "string" + "value" : { + "type" : "number" + } + }, + "type" : "object" + }, + "dto.MeasurementCollection" : { + "properties" : { + "items" : { + "items" : { + "$ref" : "#/components/schemas/dto.Measurement" + }, + "type" : "array" }, - "updater_username" : { + "timeseries_id" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigBullseyePlot" : { + "dto.Opts" : { + "additionalProperties" : true, + "type" : "object" + }, + "dto.PlotConfigBullseyePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigBullseyePlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigBullseyePlotDisplay" }, "id" : { "type" : "string" @@ -13607,7 +15263,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13626,19 +15282,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - 
"updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigBullseyePlotDisplay" : { + "dto.PlotConfigBullseyePlotDisplay" : { "properties" : { "x_axis_timeseries_id" : { "type" : "string" @@ -13649,25 +15305,25 @@ }, "type" : "object" }, - "PlotConfigContourPlot" : { + "dto.PlotConfigContourPlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigContourPlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigContourPlotDisplay" }, "id" : { "type" : "string" @@ -13683,7 +15339,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13702,19 +15358,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigContourPlotDisplay" : { + "dto.PlotConfigContourPlotDisplay" : { "properties" : { "contour_smoothing" : { "type" : "boolean" @@ -13740,39 +15396,25 @@ }, "type" : "object" }, - "PlotConfigMeasurementBullseyePlot" : { - "properties" : { - "time" : { - "type" : "string" - }, - "x" : { - "type" : "number" - }, - "y" : { - "type" : "number" - } - }, - "type" : "object" - }, - "PlotConfigProfilePlot" : { + "dto.PlotConfigProfilePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, 
"display" : { - "$ref" : "#/components/schemas/PlotConfigProfilePlotDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigProfilePlotDisplay" }, "id" : { "type" : "string" @@ -13788,7 +15430,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13807,19 +15449,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigProfilePlotDisplay" : { + "dto.PlotConfigProfilePlotDisplay" : { "properties" : { "instrument_id" : { "type" : "string" @@ -13830,7 +15472,7 @@ }, "type" : "object" }, - "PlotConfigScatterLineCustomShape" : { + "dto.PlotConfigScatterLineCustomShape" : { "properties" : { "color" : { "type" : "string" @@ -13850,25 +15492,25 @@ }, "type" : "object" }, - "PlotConfigScatterLineDisplay" : { + "dto.PlotConfigScatterLineDisplay" : { "properties" : { "layout" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineLayout" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineLayout" }, "traces" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineTimeseriesTrace" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineTimeseriesTrace" }, "type" : "array" } }, "type" : "object" }, - "PlotConfigScatterLineLayout" : { + "dto.PlotConfigScatterLineLayout" : { "properties" : { "custom_shapes" : { "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineCustomShape" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineCustomShape" }, "type" : "array" }, @@ -13881,25 +15523,25 @@ }, "type" : "object" }, - "PlotConfigScatterLinePlot" : { + "dto.PlotConfigScatterLinePlot" : { "properties" : { "auto_range" : { "type" : "boolean" }, - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + 
"created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "date_range" : { "type" : "string" }, "display" : { - "$ref" : "#/components/schemas/PlotConfigScatterLineDisplay" + "$ref" : "#/components/schemas/dto.PlotConfigScatterLineDisplay" }, "id" : { "type" : "string" @@ -13915,7 +15557,7 @@ }, "report_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -13934,19 +15576,19 @@ "threshold" : { "type" : "integer" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "PlotConfigScatterLineTimeseriesTrace" : { + "dto.PlotConfigScatterLineTimeseriesTrace" : { "properties" : { "color" : { "type" : "string" @@ -13987,47 +15629,15 @@ }, "type" : "object" }, - "Profile" : { - "properties" : { - "display_name" : { - "type" : "string" - }, - "email" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "is_admin" : { - "type" : "boolean" - }, - "roles" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, - "tokens" : { - "items" : { - "$ref" : "#/components/schemas/TokenInfoProfile" - }, - "type" : "array" - }, - "username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Project" : { + "dto.Project" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "district_id" : { @@ -14057,27 +15667,19 @@ "slug" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "ProjectCount" : { - 
"properties" : { - "project_count" : { - "type" : "integer" - } - }, - "type" : "object" - }, - "ProjectInstrumentAssignments" : { + "dto.ProjectInstrumentAssignments" : { "properties" : { "instrument_ids" : { "items" : { @@ -14088,38 +15690,15 @@ }, "type" : "object" }, - "ProjectMembership" : { - "properties" : { - "email" : { - "type" : "string" - }, - "id" : { - "type" : "string" - }, - "profile_id" : { - "type" : "string" - }, - "role" : { - "type" : "string" - }, - "role_id" : { - "type" : "string" - }, - "username" : { - "type" : "string" - } - }, - "type" : "object" - }, - "ReportConfig" : { + "dto.ReportConfig" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator_id" : { + "created_by" : { "type" : "string" }, - "creator_username" : { + "created_by_username" : { "type" : "string" }, "description" : { @@ -14129,7 +15708,7 @@ "type" : "string" }, "global_overrides" : { - "$ref" : "#/components/schemas/ReportConfigGlobalOverrides" + "$ref" : "#/components/schemas/dto.ReportConfigGlobalOverrides" }, "id" : { "type" : "string" @@ -14139,7 +15718,7 @@ }, "plot_configs" : { "items" : { - "$ref" : "#/components/schemas/IDSlugName" + "$ref" : "#/components/schemas/dto.IDSlugName" }, "type" : "array" }, @@ -14152,91 +15731,38 @@ "slug" : { "type" : "string" }, - "update_date" : { + "updated_by" : { "type" : "string" }, - "updater_id" : { + "updated_by_username" : { "type" : "string" }, - "updater_username" : { + "updatedd_at" : { "type" : "string" } }, "type" : "object" }, - "ReportConfigGlobalOverrides" : { + "dto.ReportConfigGlobalOverrides" : { "properties" : { "date_range" : { - "$ref" : "#/components/schemas/TextOption" + "$ref" : "#/components/schemas/dto.TextOption" }, "show_masked" : { - "$ref" : "#/components/schemas/ToggleOption" + "$ref" : "#/components/schemas/dto.ToggleOption" }, "show_nonvalidated" : { - "$ref" : "#/components/schemas/ToggleOption" - } - }, - "type" : "object" - }, - "ReportConfigWithPlotConfigs" 
: { - "properties" : { - "create_date" : { - "type" : "string" - }, - "creator_id" : { - "type" : "string" - }, - "creator_username" : { - "type" : "string" - }, - "description" : { - "type" : "string" - }, - "district_name" : { - "type" : "string" - }, - "global_overrides" : { - "$ref" : "#/components/schemas/ReportConfigGlobalOverrides" - }, - "id" : { - "type" : "string" - }, - "name" : { - "type" : "string" - }, - "plot_configs" : { - "items" : { - "$ref" : "#/components/schemas/PlotConfigScatterLinePlot" - }, - "type" : "array" - }, - "project_id" : { - "type" : "string" - }, - "project_name" : { - "type" : "string" - }, - "slug" : { - "type" : "string" - }, - "update_date" : { - "type" : "string" - }, - "updater_id" : { - "type" : "string" - }, - "updater_username" : { - "type" : "string" + "$ref" : "#/components/schemas/dto.ToggleOption" } }, "type" : "object" }, - "ReportDownloadJob" : { + "dto.ReportDownloadJob" : { "properties" : { - "create_date" : { + "created_at" : { "type" : "string" }, - "creator" : { + "created_by" : { "type" : "string" }, "file_expiry" : { @@ -14251,195 +15777,48 @@ "progress" : { "type" : "integer" }, - "progress_update_date" : { - "type" : "string" - }, - "report_config_id" : { - "type" : "string" - }, - "status" : { - "type" : "string" - } - }, - "type" : "object" - }, - "SaaMeasurements" : { - "properties" : { - "measurements" : { - "items" : { - "$ref" : "#/components/schemas/SaaSegmentMeasurement" - }, - "type" : "array" - }, - "time" : { - "type" : "string" - } - }, - "type" : "object" - }, - "SaaSegment" : { - "properties" : { - "id" : { - "type" : "integer" - }, - "instrument_id" : { - "type" : "string" - }, - "length" : { - "type" : "number" - }, - "length_timeseries_id" : { - "type" : "string" - }, - "temp_timeseries_id" : { - "type" : "string" - }, - "x_timeseries_id" : { - "type" : "string" - }, - "y_timeseries_id" : { - "type" : "string" - }, - "z_timeseries_id" : { - "type" : "string" - } - }, - "type" : "object" - 
}, - "SaaSegmentMeasurement" : { - "properties" : { - "elevation" : { - "type" : "number" - }, - "segment_id" : { - "type" : "integer" - }, - "temp" : { - "type" : "number" - }, - "temp_cum_dev" : { - "type" : "number" - }, - "temp_increment" : { - "type" : "number" - }, - "x" : { - "type" : "number" - }, - "x_cum_dev" : { - "type" : "number" - }, - "x_increment" : { - "type" : "number" - }, - "y" : { - "type" : "number" - }, - "y_cum_dev" : { - "type" : "number" - }, - "y_increment" : { - "type" : "number" - }, - "z" : { - "type" : "number" - }, - "z_cum_dev" : { - "type" : "number" - }, - "z_increment" : { - "type" : "number" - } - }, - "type" : "object" - }, - "SearchResult" : { - "properties" : { - "id" : { - "type" : "string" - }, - "item" : { - "type" : "object" - }, - "type" : { - "type" : "string" - } - }, - "type" : "object" - }, - "Site" : { - "properties" : { - "description" : { - "type" : "string" - }, - "elevation" : { + "progress_updated_at" : { "type" : "string" }, - "elevationUnits" : { - "type" : "string" - }, - "siteName" : { - "$ref" : "#/components/schemas/SiteName" - } - }, - "type" : "object" - }, - "SiteName" : { - "properties" : { - "id" : { + "report_config_id" : { "type" : "string" }, - "nameType" : { + "status" : { "type" : "string" } }, "type" : "object" }, - "Submittal" : { + "dto.SaaSegment" : { "properties" : { - "alert_config_id" : { - "type" : "string" - }, - "alert_config_name" : { - "type" : "string" - }, - "alert_type_id" : { - "type" : "string" - }, - "alert_type_name" : { - "type" : "string" + "id" : { + "type" : "integer" }, - "completion_date" : { + "instrument_id" : { "type" : "string" }, - "create_date" : { - "type" : "string" + "length" : { + "type" : "number" }, - "due_date" : { + "length_timeseries_id" : { "type" : "string" }, - "id" : { + "temp_timeseries_id" : { "type" : "string" }, - "marked_as_missing" : { - "type" : "boolean" - }, - "project_id" : { + "x_timeseries_id" : { "type" : "string" }, - 
"submittal_status_id" : { + "y_timeseries_id" : { "type" : "string" }, - "submittal_status_name" : { + "z_timeseries_id" : { "type" : "string" - }, - "warning_sent" : { - "type" : "boolean" } }, "type" : "object" }, - "TextOption" : { + "dto.TextOption" : { "properties" : { "enabled" : { "type" : "boolean" @@ -14450,7 +15829,7 @@ }, "type" : "object" }, - "Timeseries" : { + "dto.Timeseries" : { "properties" : { "id" : { "type" : "string" @@ -14490,7 +15869,7 @@ }, "values" : { "items" : { - "$ref" : "#/components/schemas/Measurement" + "$ref" : "#/components/schemas/dto.Measurement" }, "type" : "array" }, @@ -14500,18 +15879,18 @@ }, "type" : "object" }, - "TimeseriesCollectionItems" : { + "dto.TimeseriesCollectionItems" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/Timeseries" + "$ref" : "#/components/schemas/dto.Timeseries" }, "type" : "array" } }, "type" : "object" }, - "TimeseriesCwms" : { + "dto.TimeseriesCwms" : { "properties" : { "cwms_extent_earliest_time" : { "type" : "string" @@ -14563,7 +15942,7 @@ }, "values" : { "items" : { - "$ref" : "#/components/schemas/Measurement" + "$ref" : "#/components/schemas/dto.Measurement" }, "type" : "array" }, @@ -14573,18 +15952,18 @@ }, "type" : "object" }, - "TimeseriesMeasurementCollectionCollection" : { + "dto.TimeseriesMeasurementCollectionCollection" : { "properties" : { "items" : { "items" : { - "$ref" : "#/components/schemas/MeasurementCollection" + "$ref" : "#/components/schemas/dto.MeasurementCollection" }, "type" : "array" } }, "type" : "object" }, - "ToggleOption" : { + "dto.ToggleOption" : { "properties" : { "enabled" : { "type" : "boolean" @@ -14595,134 +15974,324 @@ }, "type" : "object" }, - "Token" : { + "dto.UploaderConfig" : { "properties" : { - "issued" : { + "column_offset" : { + "type" : "integer" + }, + "comment_field" : { "type" : "string" }, - "profile_id" : { + "comment_field_enabled" : { + "type" : "boolean" + }, + "created_at" : { "type" : "string" }, - 
"secret_token" : { + "created_by" : { "type" : "string" }, - "token_id" : { + "created_by_username" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "id" : { + "type" : "string" + }, + "masked_field" : { + "type" : "string" + }, + "masked_field_enabled" : { + "type" : "boolean" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "row_offset" : { + "type" : "integer" + }, + "slug" : { + "type" : "string" + }, + "time_field" : { + "type" : "string" + }, + "type" : { + "$ref" : "#/components/schemas/dto.UploaderConfigType" + }, + "tz_name" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "updatedd_at" : { + "type" : "string" + }, + "validated_field" : { "type" : "string" + }, + "validated_field_enabled" : { + "type" : "boolean" } }, "type" : "object" }, - "TokenInfoProfile" : { + "dto.UploaderConfigMapping" : { "properties" : { - "issued" : { + "field_name" : { "type" : "string" }, - "token_id" : { + "timeseries_id" : { "type" : "string" } }, "type" : "object" }, - "Unit" : { + "dto.UploaderConfigType" : { + "enum" : [ "csv", "dux", "toa5" ], + "type" : "string", + "x-enum-varnames" : [ "CSV", "DUX", "TOA5" ] + }, + "service.AggregatePlotConfigMeasurementsContourPlot" : { "properties" : { - "abbreviation" : { + "x" : { + "items" : { + "type" : "number" + }, + "type" : "array" + }, + "y" : { + "items" : { + "type" : "number" + }, + "type" : "array" + }, + "z" : { + "items" : { + "type" : "number" + }, + "type" : "array" + } + }, + "type" : "object" + }, + "service.AwarePlatformParameterConfig" : { + "properties" : { + "aware_id" : { + "type" : "string" + }, + "aware_parameters" : { + "additionalProperties" : { + "type" : "string" + }, + "type" : "object" + }, + "instrument_id" : { + "type" : "string" + } + }, + "type" : "object" + }, + "service.DataloggerWithKey" : { + "properties" : { + "created_at" : { + "type" : "string" + }, 
+ "created_by" : { + "type" : "string" + }, + "created_by_username" : { "type" : "string" }, + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, "id" : { "type" : "string" }, - "measure" : { + "key" : { "type" : "string" }, - "measure_id" : { + "model" : { + "type" : "string" + }, + "model_id" : { "type" : "string" }, "name" : { "type" : "string" }, - "unit_family" : { + "project_id" : { "type" : "string" }, - "unit_family_id" : { + "slug" : { + "type" : "string" + }, + "sn" : { + "type" : "string" + }, + "tables" : { + "items" : { + "$ref" : "#/components/schemas/db.DataloggerTableIDName" + }, + "type" : "array" + }, + "updated_at" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "service.DomainMap" : { + "additionalProperties" : { + "items" : { + "$ref" : "#/components/schemas/db.DomainGroupOpt" + }, + "type" : "array" + }, + "type" : "object" + }, + "service.Healthcheck" : { + "properties" : { + "status" : { "type" : "string" } }, "type" : "object" }, - "collectionGroupDetailsTimeseries" : { + "service.Heartbeat" : { "properties" : { - "id" : { + "time" : { "type" : "string" + } + }, + "type" : "object" + }, + "service.InstrumentsValidation" : { + "properties" : { + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" }, - "instrument" : { + "is_valid" : { + "type" : "boolean" + } + }, + "type" : "object" + }, + "service.ProjectCount" : { + "properties" : { + "project_count" : { + "type" : "integer" + } + }, + "type" : "object" + }, + "service.ReportConfigWithPlotConfigs" : { + "properties" : { + "created_at" : { "type" : "string" }, - "instrument_id" : { + "created_by" : { "type" : "string" }, - "instrument_slug" : { + "created_by_username" : { "type" : "string" }, - "is_computed" : { - "type" : "boolean" + "description" : { + "type" : "string" }, - "latest_time" : { + "district_name" : { "type" : "string" }, - 
"latest_value" : { - "type" : "number" + "global_overrides" : { + "$ref" : "#/components/schemas/db.ReportConfigGlobalOverrides" + }, + "id" : { + "type" : "string" }, "name" : { "type" : "string" }, - "parameter" : { + "plot_configs" : { + "items" : { + "$ref" : "#/components/schemas/db.VPlotConfiguration" + }, + "type" : "array" + }, + "project_id" : { "type" : "string" }, - "parameter_id" : { + "project_name" : { "type" : "string" }, "slug" : { "type" : "string" }, - "type" : { + "updated_at" : { "type" : "string" }, - "unit" : { + "updated_by" : { "type" : "string" }, - "unit_id" : { + "updated_by_username" : { + "type" : "string" + } + }, + "type" : "object" + }, + "service.Token" : { + "properties" : { + "hash" : { "type" : "string" }, - "values" : { - "items" : { - "$ref" : "#/components/schemas/Measurement" - }, - "type" : "array" + "id" : { + "type" : "string" }, - "variable" : { + "issued" : { + "type" : "string" + }, + "profile_id" : { + "type" : "string" + }, + "secret_token" : { + "type" : "string" + }, + "token_id" : { "type" : "string" } }, "type" : "object" }, - "pgtype.JSON" : { + "_timeseries_measurements_post_request" : { "properties" : { - "bytes" : { - "items" : { - "type" : "integer" - }, - "type" : "array" - }, - "status" : { - "$ref" : "#/components/schemas/pgtype.Status" + "timeseries_measurement_collections" : { + "description" : "TOA5 file of timeseries measurement collections", + "format" : "binary", + "type" : "string" } }, "type" : "object" - }, - "pgtype.Status" : { - "enum" : [ 0, 1, 2 ], - "type" : "integer", - "x-enum-varnames" : [ "Undefined", "Null", "Present" ] } }, "securitySchemes" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index e6bde2c5..2e342ba3 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -26,7 +26,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: 
'#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -116,7 +116,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/dto.AlertSubscription' description: alert subscription payload required: true responses: @@ -125,7 +125,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/db.AlertProfileSubscription' type: array description: OK "400": @@ -160,7 +160,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AwarePlatformParameterConfig' + $ref: '#/components/schemas/service.AwarePlatformParameterConfig' type: array description: OK "400": @@ -192,7 +192,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AwareParameter' + $ref: '#/components/schemas/db.AwareParameterListRow' type: array description: OK "400": @@ -228,7 +228,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/dto.Datalogger' description: datalogger payload required: true responses: @@ -236,9 +236,7 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/DataloggerWithKey' - type: array + $ref: '#/components/schemas/service.DataloggerWithKey' description: OK "400": content: @@ -329,7 +327,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' description: OK "400": content: @@ -372,7 +370,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/dto.Datalogger' description: datalogger payload required: true responses: @@ -380,7 +378,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' description: OK "400": content: @@ -425,7 +423,7 @@ paths: content: '*/*': schema: - $ref: 
'#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ -433,7 +431,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -480,7 +478,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerWithKey' + $ref: '#/components/schemas/service.DataloggerWithKey' description: OK "400": content: @@ -585,7 +583,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' type: array description: OK "400": @@ -636,7 +634,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ -644,7 +642,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -696,7 +694,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/dto.EquivalencyTable' description: equivalency table payload required: true responses: @@ -704,7 +702,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/EquivalencyTable' + $ref: '#/components/schemas/db.VDataloggerEquivalencyTable' description: OK "400": content: @@ -817,7 +815,8 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerTablePreview' + additionalProperties: true + type: object description: OK "400": content: @@ -869,7 +868,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DataloggerTablePreview' + $ref: '#/components/schemas/db.VDataloggerPreview' description: OK "400": content: 
@@ -908,7 +907,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Datalogger' + $ref: '#/components/schemas/db.VDatalogger' type: array description: OK "400": @@ -942,7 +941,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/District' + $ref: '#/components/schemas/db.VDistrict' type: array description: OK "400": @@ -974,7 +973,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Domain' + $ref: '#/components/schemas/db.PgTimezoneNamesListRow' type: array description: OK "400": @@ -995,7 +994,7 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: lists all domains + summary: lists time zone options tags: - domain /domains/map: @@ -1005,7 +1004,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DomainMap' + $ref: '#/components/schemas/service.DomainMap' description: OK "400": content: @@ -1043,7 +1042,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/EmailAutocompleteResult' + $ref: '#/components/schemas/db.EmailAutocompleteListRow' type: array description: OK "400": @@ -1085,7 +1084,9 @@ paths: schema: items: additionalProperties: - $ref: '#/components/schemas/MeasurementCollectionLean' + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array type: object type: array description: OK @@ -1119,7 +1120,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CalculatedTimeseries' + $ref: '#/components/schemas/db.TimeseriesComputedListForInstrumentRow' type: array description: OK "400": @@ -1151,13 +1152,13 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: additionalProperties: true type: object - description: OK + description: Created "400": content: application/json: @@ -1247,7 +1248,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CalculatedTimeseries' + $ref: 
'#/components/schemas/dto.CalculatedTimeseries' type: array description: OK "400": @@ -1280,10 +1281,7 @@ paths: content: application/json: schema: - items: - additionalProperties: true - type: object - type: array + $ref: '#/components/schemas/service.Healthcheck' description: OK summary: checks the health of the api server tags: @@ -1298,12 +1296,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Heartbeat' - description: OK + $ref: '#/components/schemas/service.Heartbeat' + description: Created summary: creates a heartbeat entry at regular intervals tags: - heartbeat @@ -1314,7 +1312,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Heartbeat' + $ref: '#/components/schemas/service.Heartbeat' description: OK summary: gets the latest heartbeat tags: @@ -1327,7 +1325,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Heartbeat' + $ref: '#/components/schemas/service.Heartbeat' type: array description: OK summary: returns all heartbeats @@ -1340,7 +1338,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Home' + $ref: '#/components/schemas/db.HomeGetRow' description: OK "500": content: @@ -1351,50 +1349,6 @@ paths: summary: gets information for the homepage tags: - home - /inclinometer_explorer: - post: - requestBody: - content: - application/json: - schema: - items: - type: string - type: array - description: array of inclinometer instrument uuids - required: true - responses: - "200": - content: - application/json: - schema: - items: - additionalProperties: - $ref: '#/components/schemas/InclinometerMeasurementCollectionLean' - type: object - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - 
"500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: list inclinometer timeseries measurements for explorer page - tags: - - explorer - x-codegen-request-body-name: instrument_ids /instrument_groups: get: responses: @@ -1403,7 +1357,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' type: array description: OK "400": @@ -1438,7 +1392,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/dto.InstrumentGroup' description: instrument group payload required: true responses: @@ -1446,7 +1400,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + items: + $ref: '#/components/schemas/db.InstrumentGroup' + type: array description: Created "400": content: @@ -1492,9 +1448,8 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/InstrumentGroup' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -1533,7 +1488,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' description: OK "400": content: @@ -1574,7 +1529,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/dto.InstrumentGroup' description: instrument group payload required: true responses: @@ -1582,7 +1537,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.InstrumentGroupUpdateRow' description: OK "400": content: @@ -1624,7 +1579,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' type: array description: OK "400": @@ -1762,7 +1717,7 @@ paths: application/json: schema: 
items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -1801,7 +1756,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array + type: object description: OK "400": content: @@ -1824,15 +1783,67 @@ paths: summary: lists timeseries measurements by instrument group id tags: - timeseries - /instruments: + /instruments/count: + get: + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets the total number of non deleted instruments in the system + tags: + - instrument + /instruments/incl/{instrument_id}/measurements: get: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + - description: after time + in: query + name: after + schema: + format: date-time + type: string + - description: before time + in: query + name: before + required: true + schema: + format: date-time + type: string responses: "200": content: application/json: schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInclMeasurement' type: array description: OK "400": @@ -1853,17 +1864,80 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: lists all instruments + summary: creates instrument notes tags: - - instrument - /instruments/count: + - 
instrument-incl + /instruments/incl/{instrument_id}/segments: get: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/db.VInclSegment' + type: array + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets all incl segments for an instrument + tags: + - instrument-incl + put: + parameters: + - description: instrument uuid + in: path + name: instrument_id + required: true + schema: + format: uuid + type: string + - description: api key + in: query + name: key + schema: + type: string + requestBody: + content: + '*/*': + schema: + items: + $ref: '#/components/schemas/dto.InclSegment' + type: array + description: incl instrument segments payload + required: true responses: "200": content: application/json: schema: - $ref: '#/components/schemas/InstrumentCount' + items: + $ref: '#/components/schemas/dto.InclSegment' + type: array description: OK "400": content: @@ -1883,9 +1957,12 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: gets the total number of non deleted instruments in the system + security: + - Bearer: [] + summary: updates multiple segments for an incl instrument tags: - - instrument + - instrument-incl + x-codegen-request-body-name: instrument_segments /instruments/ipi/{instrument_id}/measurements: get: parameters: @@ -1915,7 +1992,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiMeasurements' + $ref: 
'#/components/schemas/db.VIpiMeasurement' type: array description: OK "400": @@ -1955,7 +2032,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/db.VIpiSegment' type: array description: OK "400": @@ -1998,7 +2075,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/dto.IpiSegment' type: array description: ipi instrument segments payload required: true @@ -2008,7 +2085,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/IpiSegment' + $ref: '#/components/schemas/dto.IpiSegment' type: array description: OK "400": @@ -2036,37 +2113,6 @@ paths: - instrument-ipi x-codegen-request-body-name: instrument_segments /instruments/notes: - get: - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/InstrumentNote' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: gets all instrument notes - tags: - - instrument-note post: parameters: - description: api key @@ -2078,18 +2124,18 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentNoteCollection' + $ref: '#/components/schemas/dto.InstrumentNoteCollection' description: instrument note collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' type: array - description: OK + description: Created "400": content: application/json: @@ -2129,7 +2175,7 @@ paths: content: application/json: schema: - $ref: 
'#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' description: OK "400": content: @@ -2170,7 +2216,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/dto.InstrumentNote' description: instrument note collection payload required: true responses: @@ -2179,7 +2225,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.InstrumentNote' type: array description: OK "400": @@ -2235,7 +2281,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaMeasurements' + $ref: '#/components/schemas/db.VSaaMeasurement' type: array description: OK "400": @@ -2275,7 +2321,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/db.VSaaSegment' type: array description: OK "400": @@ -2318,7 +2364,7 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/dto.SaaSegment' type: array description: saa instrument segments payload required: true @@ -2328,7 +2374,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SaaSegment' + $ref: '#/components/schemas/dto.SaaSegment' type: array description: OK "400": @@ -2370,7 +2416,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": content: @@ -2409,7 +2455,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.InstrumentNote' type: array description: OK "400": @@ -2502,7 +2548,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentStatus' + $ref: '#/components/schemas/db.VInstrumentStatus' type: array description: OK "400": @@ -2544,7 +2590,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentStatusCollection' + $ref: 
'#/components/schemas/dto.InstrumentStatusCollection' description: instrument status collection paylaod required: true responses: @@ -2654,7 +2700,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VInstrumentStatus' type: array description: OK "400": @@ -2699,7 +2745,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: '#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -2745,7 +2791,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' description: OK "400": content: @@ -2807,7 +2853,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + items: + $ref: '#/components/schemas/db.MeasurementCollection' + type: array description: OK "400": content: @@ -2862,7 +2910,11 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: + items: + $ref: '#/components/schemas/db.MeasurementCollectionLean' + type: array + type: object description: OK "400": content: @@ -2899,7 +2951,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertSubscription' + $ref: '#/components/schemas/db.AlertProfileSubscription' type: array description: OK "400": @@ -2940,7 +2992,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Alert' + $ref: '#/components/schemas/db.AlertListForProfileRow' type: array description: OK "400": @@ -2985,12 +3037,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Alert' - description: OK + $ref: '#/components/schemas/db.AlertGetRow' + description: Created "400": content: application/json: @@ -3037,7 +3089,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Alert' + $ref: 
'#/components/schemas/db.AlertGetRow' description: OK "400": content: @@ -3069,7 +3121,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Profile' + $ref: '#/components/schemas/db.VProfile' description: OK "400": content: @@ -3108,7 +3160,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' type: array description: OK "400": @@ -3142,7 +3194,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Token' + $ref: '#/components/schemas/service.Token' description: OK "400": content: @@ -3208,38 +3260,6 @@ paths: summary: deletes a token for a profile tags: - profile - /opendcs/sites: - get: - responses: - "200": - content: - text/xml: - schema: - items: - $ref: '#/components/schemas/Site' - type: array - description: OK - "400": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - text/xml: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: "lists all instruments, represented as opendcs sites" - tags: - - opendcs /profiles: post: responses: @@ -3247,7 +3267,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Profile' + $ref: '#/components/schemas/db.ProfileCreateRow' description: OK "400": content: @@ -3286,7 +3306,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' type: array description: OK "400": @@ -3322,19 +3342,19 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/dto.Project' type: array description: project collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/IDSlugName' + 
$ref: '#/components/schemas/db.ProjectCreateBatchRow' type: array - description: OK + description: Created "400": content: application/json: @@ -3366,7 +3386,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ProjectCount' + $ref: '#/components/schemas/service.ProjectCount' description: OK "400": content: @@ -3449,7 +3469,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' description: OK "400": content: @@ -3490,7 +3510,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/dto.Project' description: project payload required: true responses: @@ -3498,7 +3518,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VProject' description: OK "400": content: @@ -3540,7 +3560,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' type: array description: OK "400": @@ -3582,7 +3602,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/dto.AlertConfig' description: alert config payload required: true responses: @@ -3590,7 +3610,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3643,9 +3663,8 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/AlertConfig' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -3691,7 +3710,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3739,7 +3758,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AlertConfig' + $ref: 
'#/components/schemas/dto.AlertConfig' description: alert config payload required: true responses: @@ -3747,9 +3766,7 @@ paths: content: application/json: schema: - items: - $ref: '#/components/schemas/AlertConfig' - type: array + $ref: '#/components/schemas/db.VAlertConfig' description: OK "400": content: @@ -3791,7 +3808,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.CollectionGroup' type: array description: OK "400": @@ -3835,7 +3852,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/dto.CollectionGroup' description: collection group payload required: true responses: @@ -3844,7 +3861,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/db.CollectionGroup' type: array description: OK "400": @@ -3945,7 +3962,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/CollectionGroupDetails' + $ref: '#/components/schemas/db.VCollectionGroupDetails' description: OK "400": content: @@ -3992,7 +4009,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/dto.CollectionGroup' description: collection group payload required: true responses: @@ -4000,7 +4017,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/CollectionGroup' + $ref: '#/components/schemas/db.CollectionGroup' description: OK "400": content: @@ -4087,6 +4104,65 @@ paths: tags: - collection-groups post: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: collection group uuid + in: path + name: collection_group_id + required: true + schema: + format: uuid + type: string + - description: timeseries uuid + in: path + name: timeseries_id + required: true + schema: + format: uuid + type: string + - description: api key 
+ in: query + name: key + schema: + type: string + responses: + "201": + content: + application/json: + schema: + additionalProperties: true + type: object + description: Created + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: adds a timeseries to a collection group + tags: + - collection-groups + put: parameters: - description: project uuid in: path @@ -4142,7 +4218,7 @@ paths: description: Internal Server Error security: - Bearer: [] - summary: adds a timeseries to a collection group + summary: updates sort order for collection group timesries tags: - collection-groups /projects/{project_id}/district_rollup/evaluation_submittals: @@ -4161,7 +4237,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/DistrictRollup' + $ref: '#/components/schemas/db.VDistrictRollup' type: array description: OK "400": @@ -4201,7 +4277,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/DistrictRollup' + $ref: '#/components/schemas/db.VDistrictRollup' type: array description: OK "400": @@ -4241,7 +4317,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' type: array description: OK "400": @@ -4283,16 +4359,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' description: evaluation payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/Evaluation' - description: OK + $ref: '#/components/schemas/db.VEvaluation' + description: Created "400": content: application/json: @@ 
-4345,7 +4421,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + additionalProperties: true + type: object type: array description: OK "400": @@ -4392,7 +4469,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' description: OK "400": content: @@ -4440,7 +4517,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' description: evaluation payload required: true responses: @@ -4448,7 +4525,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/db.VEvaluation' description: OK "400": content: @@ -4520,61 +4597,6 @@ paths: summary: uploades a picture for a project tags: - project - /projects/{project_id}/inclinometer_measurements: - post: - parameters: - - description: project uuid - in: path - name: project_id - required: true - schema: - format: uuid - type: string - - description: api key - in: query - name: key - schema: - type: string - requestBody: - content: - '*/*': - schema: - $ref: '#/components/schemas/InclinometerMeasurementCollectionCollection' - description: inclinometer measurement collections - required: true - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/InclinometerMeasurementCollection' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - security: - - Bearer: [] - summary: creates or updates one or more inclinometer measurements - tags: - - measurement-inclinometer - x-codegen-request-body-name: 
timeseries_measurement_collections /projects/{project_id}/instrument_groups: get: parameters: @@ -4591,7 +4613,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/InstrumentGroup' + $ref: '#/components/schemas/db.VInstrumentGroup' type: array description: OK "400": @@ -4631,7 +4653,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Project' + $ref: '#/components/schemas/db.VInstrument' type: array description: OK "400": @@ -4681,19 +4703,19 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' type: array description: instrument collection payload required: true responses: - "200": + "201": content: application/json: schema: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/db.InstrumentCreateBatchRow' type: array - description: OK + description: Created "400": content: application/json: @@ -4745,7 +4767,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/ProjectInstrumentAssignments' + $ref: '#/components/schemas/dto.ProjectInstrumentAssignments' description: instrument uuids required: true responses: @@ -4753,7 +4775,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -4857,7 +4879,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' description: instrument payload required: true responses: @@ -4865,7 +4887,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": content: @@ -4914,7 +4936,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/AlertConfig' + $ref: '#/components/schemas/db.VAlertConfig' type: array description: OK "400": @@ -4968,12 +4990,12 @@ paths: schema: type: 
string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/AlertSubscription' - description: OK + $ref: '#/components/schemas/db.AlertProfileSubscription' + description: Created "400": content: application/json: @@ -5081,7 +5103,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Alert' + $ref: '#/components/schemas/db.VAlert' type: array description: OK "400": @@ -5139,7 +5161,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -5188,12 +5210,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' - description: OK + $ref: '#/components/schemas/service.InstrumentsValidation' + description: Created "400": content: application/json: @@ -5250,7 +5272,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/InstrumentProjectAssignments' + $ref: '#/components/schemas/dto.InstrumentProjectAssignments' description: project uuids required: true responses: @@ -5258,7 +5280,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/InstrumentsValidation' + $ref: '#/components/schemas/service.InstrumentsValidation' description: OK "400": content: @@ -5307,7 +5329,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -5356,7 +5378,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesCollectionItems' + $ref: '#/components/schemas/dto.TimeseriesCollectionItems' description: timeseries collection items payload required: true responses: @@ -5365,7 +5387,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.TimeseriesCreateBatchRow' type: array 
description: OK "400": @@ -5475,7 +5497,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Evaluation' + $ref: '#/components/schemas/dto.Evaluation' type: array description: OK "400": @@ -5525,7 +5547,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/dto.Instrument' description: instrument payload required: true responses: @@ -5533,7 +5555,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Instrument' + $ref: '#/components/schemas/db.VInstrument' description: OK "400": content: @@ -5582,7 +5604,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -5629,7 +5651,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/db.VTimeseriesCwms' type: array description: OK "400": @@ -5674,19 +5696,18 @@ paths: '*/*': schema: items: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/dto.TimeseriesCwms' type: array description: array of cwms timeseries to create required: true responses: - "200": + "201": content: application/json: schema: - items: - $ref: '#/components/schemas/TimeseriesCwms' - type: array - description: OK + additionalProperties: true + type: object + description: Created "400": content: application/json: @@ -5737,7 +5758,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesCwms' + $ref: '#/components/schemas/dto.TimeseriesCwms' description: cwms timeseries to update required: true responses: @@ -5746,7 +5767,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/TimeseriesCwms' + additionalProperties: true + type: object type: array description: OK "400": @@ -5792,7 +5814,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/ProjectMembership' + $ref: 
'#/components/schemas/db.ProfileProjectRoleListForProjectRow' type: array description: OK "400": @@ -5907,12 +5929,12 @@ paths: schema: type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/ProjectMembership' - description: OK + $ref: '#/components/schemas/db.ProfileProjectRoleGetRow' + description: Created "400": content: application/json: @@ -5952,7 +5974,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' type: array description: OK "400": @@ -5995,16 +6017,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigBullseyePlot' + $ref: '#/components/schemas/dto.PlotConfigBullseyePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6055,7 +6077,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigBullseyePlot' + $ref: '#/components/schemas/dto.PlotConfigBullseyePlot' description: plot config payload required: true responses: @@ -6063,7 +6085,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6117,7 +6139,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfigMeasurementBullseyePlot' + $ref: '#/components/schemas/db.PlotConfigMeasurementListBullseyeRow' type: array description: OK "400": @@ -6162,16 +6184,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigContourPlot' + $ref: '#/components/schemas/dto.PlotConfigContourPlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: 
'#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6222,7 +6244,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigContourPlot' + $ref: '#/components/schemas/dto.PlotConfigContourPlot' description: plot config payload required: true responses: @@ -6230,7 +6252,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6289,7 +6311,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/AggregatePlotConfigMeasurementsContourPlot' + $ref: '#/components/schemas/service.AggregatePlotConfigMeasurementsContourPlot' description: OK "400": content: @@ -6398,16 +6420,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigProfilePlot' + $ref: '#/components/schemas/dto.PlotConfigProfilePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6458,7 +6480,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigProfilePlot' + $ref: '#/components/schemas/dto.PlotConfigProfilePlot' description: plot config payload required: true responses: @@ -6466,7 +6488,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6511,16 +6533,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: 
'#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6571,7 +6593,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: @@ -6579,7 +6601,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6679,7 +6701,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6718,7 +6740,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' type: array description: OK "400": @@ -6760,16 +6782,16 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' - description: OK + $ref: '#/components/schemas/db.VPlotConfiguration' + description: Created "400": content: application/json: @@ -6868,7 +6890,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: '#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6916,7 +6938,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.PlotConfigScatterLinePlot' description: plot config payload required: true responses: @@ -6924,7 +6946,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PlotConfig' + $ref: 
'#/components/schemas/db.VPlotConfiguration' description: OK "400": content: @@ -6970,7 +6992,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/db.VReportConfig' description: OK "400": content: @@ -7013,7 +7035,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/dto.ReportConfig' description: report config payload required: true responses: @@ -7021,7 +7043,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/db.VReportConfig' description: Created "400": content: @@ -7125,7 +7147,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportConfig' + $ref: '#/components/schemas/dto.ReportConfig' description: report config payload required: true responses: @@ -7187,7 +7209,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportDownloadJob' + $ref: '#/components/schemas/db.ReportDownloadJob' description: Created "400": content: @@ -7246,7 +7268,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ReportDownloadJob' + $ref: '#/components/schemas/db.ReportDownloadJob' description: OK "400": content: @@ -7347,7 +7369,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Submittal' + $ref: '#/components/schemas/db.VSubmittal' type: array description: OK "400": @@ -7387,7 +7409,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/db.VTimeseries' type: array description: OK "400": @@ -7430,7 +7452,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' + $ref: '#/components/schemas/dto.TimeseriesMeasurementCollectionCollection' description: array of timeseries measurement collections required: true responses: @@ -7438,9 +7460,8 @@ paths: content: 
application/json: schema: - items: - $ref: '#/components/schemas/MeasurementCollection' - type: array + additionalProperties: true + type: object description: OK "400": content: @@ -7496,7 +7517,7 @@ paths: content: '*/*': schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' + $ref: '#/components/schemas/dto.TimeseriesMeasurementCollectionCollection' description: array of timeseries measurement collections required: true responses: @@ -7505,7 +7526,8 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: true + type: object type: array description: OK "400": @@ -7532,113 +7554,85 @@ paths: tags: - measurement x-codegen-request-body-name: timeseries_measurement_collections - /projects/{project_slug}/images/{uri_path}: + /projects/{project_id}/uploader_configs: get: parameters: - - description: project abbr - in: path - name: project_slug - required: true - schema: - type: string - - description: uri path of requested resource + - description: project uuid in: path - name: uri_path + name: project_id required: true schema: + format: uuid type: string responses: "200": - content: {} + content: + application/json: + schema: + items: + $ref: '#/components/schemas/db.VUploaderConfig' + type: array description: OK "400": content: - image/jpeg: + application/json: schema: $ref: '#/components/schemas/echo.HTTPError' description: Bad Request - "404": - content: - image/jpeg: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - image/jpeg: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: "serves media, files, etc for a given project" + summary: lists uploader configs for a project tags: - - media - /report_configs/{report_config_id}/plot_configs: - get: + - uploader + post: parameters: - - description: report config uuid + - description: project uuid in: path - name: 
report_config_id + name: project_id required: true schema: format: uuid type: string - - description: api key - in: query - name: key + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/dto.UploaderConfig' + description: uploader config payload required: true - schema: - type: string responses: - "200": + "201": content: application/json: schema: - $ref: '#/components/schemas/ReportConfigWithPlotConfigs' - description: OK + additionalProperties: true + type: object + description: Created "400": content: application/json: schema: $ref: '#/components/schemas/echo.HTTPError' description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: Lists all plot configs for a report config + summary: creates an uploader config tags: - - report-config - /report_jobs/{job_id}: - put: + - uploader + x-codegen-request-body-name: uploader_config + /projects/{project_id}/uploader_configs/{uploader_config_id}: + delete: parameters: - - description: download job uuid + - description: project uuid in: path - name: job_id + name: project_id required: true schema: format: uuid type: string - - description: api key - in: query - name: key + - description: uploader config uuid + in: path + name: uploader_config_id required: true schema: + format: uuid type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ReportDownloadJob' - description: report download job payload - required: true responses: "200": content: @@ -7653,35 +7647,99 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Bad Request - "404": + summary: deletes an uploader config + tags: + - uploader + put: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid 
+ type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/dto.UploaderConfig' + description: uploader config payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": content: application/json: schema: $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": + description: Bad Request + summary: updates an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config + /projects/{project_id}/uploader_configs/{uploader_config_id}/mappings: + delete: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + additionalProperties: true + type: object + description: OK + "400": content: application/json: schema: $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: updates a job that creates a pdf report + description: Bad Request + summary: updates mappings for an uploader config tags: - - report-config - x-codegen-request-body-name: report_download_job - /search/{entity}: + - uploader get: parameters: - - description: "entity to search (i.e. 
projects, etc.)" + - description: project uuid in: path - name: entity + name: project_id required: true schema: + format: uuid type: string - - description: search string - in: query - name: q + - description: uploader config uuid + in: path + name: uploader_config_id + required: true schema: + format: uuid type: string responses: "200": @@ -7689,7 +7747,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/SearchResult' + $ref: '#/components/schemas/db.UploaderConfigMapping' type: array description: OK "400": @@ -7698,36 +7756,77 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Bad Request - "404": + summary: lists timeseries mappings for an uploader config + tags: + - uploader + post: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: uploader config uuid + in: path + name: uploader_config_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + '*/*': + schema: + items: + $ref: '#/components/schemas/dto.UploaderConfigMapping' + type: array + description: uploader config mappings payload + required: true + responses: + "201": content: application/json: schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": + additionalProperties: true + type: object + description: Created + "400": content: application/json: schema: $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: allows searching using a string on different entities + description: Bad Request + summary: creates mappings for an uploader config tags: - - search - /submittals/{submittal_id}/verify_missing: + - uploader + x-codegen-request-body-name: uploader_config_mappings put: parameters: - - description: submittal uuid + - description: project uuid in: path - name: submittal_id + name: project_id required: true schema: format: uuid type: string - - description: api 
key - in: query - name: key + - description: uploader config uuid + in: path + name: uploader_config_id + required: true schema: + format: uuid type: string + requestBody: + content: + '*/*': + schema: + items: + $ref: '#/components/schemas/dto.UploaderConfigMapping' + type: array + description: uploader config mappings payload + required: true responses: "200": content: @@ -7742,48 +7841,76 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Bad Request + summary: updates mappings for an uploader config + tags: + - uploader + x-codegen-request-body-name: uploader_config_mappings + /projects/{project_slug}/images/{uri_path}: + get: + parameters: + - description: project abbr + in: path + name: project_slug + required: true + schema: + type: string + - description: uri path of requested resource + in: path + name: uri_path + required: true + schema: + type: string + responses: + "200": + content: + image/jpeg: + schema: + format: binary + type: string + description: OK + "400": + content: + image/jpeg: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request "404": content: - application/json: + image/jpeg: schema: $ref: '#/components/schemas/echo.HTTPError' description: Not Found "500": content: - application/json: + image/jpeg: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - security: - - Bearer: [] - summary: verifies the specified submittal is "missing" and will not be completed + summary: "serves media, files, etc for a given project" tags: - - submittal - /timeseries: - post: + - media + /report_configs/{report_config_id}/plot_configs: + get: parameters: + - description: report config uuid + in: path + name: report_config_id + required: true + schema: + format: uuid + type: string - description: api key in: query name: key + required: true schema: type: string - requestBody: - content: - '*/*': - schema: - $ref: '#/components/schemas/TimeseriesCollectionItems' - 
description: timeseries collection items payload - required: true responses: "200": content: application/json: schema: - items: - additionalProperties: - type: string - type: object - type: array + $ref: '#/components/schemas/service.ReportConfigWithPlotConfigs' description: OK "400": content: @@ -7803,18 +7930,15 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - security: - - Bearer: [] - summary: creates one or more timeseries + summary: Lists all plot configs for a report config tags: - - timeseries - x-codegen-request-body-name: timeseries_collection_items - /timeseries/{timeseries_id}: - delete: + - report-config + /report_jobs/{job_id}: + put: parameters: - - description: timeseries uuid + - description: download job uuid in: path - name: timeseries_id + name: job_id required: true schema: format: uuid @@ -7822,8 +7946,16 @@ paths: - description: api key in: query name: key + required: true schema: type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/dto.ReportDownloadJob' + description: report download job payload + required: true responses: "200": content: @@ -7850,26 +7982,32 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - security: - - Bearer: [] - summary: deletes a single timeseries by id + summary: updates a job that creates a pdf report tags: - - timeseries + - report-config + x-codegen-request-body-name: report_download_job + /search/projects: get: parameters: - - description: timeseries uuid + - description: "entity to search (i.e. 
projects, etc.)" in: path - name: timeseries_id + name: entity required: true schema: - format: uuid + type: string + - description: search string + in: query + name: q + schema: type: string responses: "200": content: application/json: schema: - $ref: '#/components/schemas/Timeseries' + items: + $ref: '#/components/schemas/db.VProject' + type: array description: OK "400": content: @@ -7889,14 +8027,15 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: gets a single timeseries by id + summary: allows searching using a string on different entities tags: - - timeseries + - search + /submittals/{submittal_id}/verify_missing: put: parameters: - - description: timeseries uuid + - description: submittal uuid in: path - name: timeseries_id + name: submittal_id required: true schema: format: uuid @@ -7906,20 +8045,12 @@ paths: name: key schema: type: string - requestBody: - content: - '*/*': - schema: - $ref: '#/components/schemas/Timeseries' - description: timeseries payload - required: true responses: "200": content: application/json: schema: - additionalProperties: - type: string + additionalProperties: true type: object description: OK "400": @@ -7942,27 +8073,67 @@ paths: description: Internal Server Error security: - Bearer: [] - summary: updates a single timeseries by id + summary: verifies the specified submittal is "missing" and will not be completed tags: - - timeseries - x-codegen-request-body-name: timeseries - /timeseries/{timeseries_id}/inclinometer_measurements: - delete: + - submittal + /timeseries: + post: parameters: - - description: timeseries uuid + - description: api key + in: query + name: key + schema: + type: string + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/dto.TimeseriesCollectionItems' + description: timeseries collection items payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: + type: string + 
type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: creates one or more timeseries + tags: + - timeseries + x-codegen-request-body-name: timeseries_collection_items + /timeseries/{timeseries_id}: + delete: + parameters: + - description: timeseries uuid in: path name: timeseries_id required: true schema: format: uuid type: string - - description: timestamp of measurement to delete - in: query - name: time - required: true - schema: - format: date-time - type: string - description: api key in: query name: key @@ -7996,9 +8167,9 @@ paths: description: Internal Server Error security: - Bearer: [] - summary: deletes a single inclinometer measurement by timestamp + summary: deletes a single timeseries by id tags: - - measurement-inclinometer + - timeseries get: parameters: - description: timeseries uuid @@ -8008,24 +8179,61 @@ paths: schema: format: uuid type: string - - description: after timestamp - in: query - name: after + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/db.VTimeseries' + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets a single timeseries by id + tags: + - timeseries + put: + parameters: + - description: timeseries uuid + in: path + name: timeseries_id + required: true 
schema: - format: date-time + format: uuid type: string - - description: before timestamp + - description: api key in: query - name: before + name: key schema: - format: date-time type: string + requestBody: + content: + '*/*': + schema: + $ref: '#/components/schemas/dto.Timeseries' + description: timeseries payload + required: true responses: "200": content: application/json: schema: - $ref: '#/components/schemas/InclinometerMeasurementCollection' + $ref: '#/components/schemas/dto.Timeseries' description: OK "400": content: @@ -8045,9 +8253,12 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' description: Internal Server Error - summary: lists all measurements for an inclinometer + security: + - Bearer: [] + summary: updates a single timeseries by id tags: - - measurement-inclinometer + - timeseries + x-codegen-request-body-name: timeseries /timeseries/{timeseries_id}/measurements: delete: parameters: @@ -8132,7 +8343,9 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MeasurementCollection' + items: + $ref: '#/components/schemas/db.MeasurementCollection' + type: array description: OK "400": content: @@ -8157,27 +8370,22 @@ paths: - timeseries /timeseries_measurements: post: - parameters: - - description: api key - in: query - name: key - required: true - schema: - type: string requestBody: content: - '*/*': + application/json: schema: - $ref: '#/components/schemas/TimeseriesMeasurementCollectionCollection' - description: array of timeseries measurement collections - required: true + $ref: '#/components/schemas/_timeseries_measurements_post_request' + multipart/form-data: + schema: + $ref: '#/components/schemas/_timeseries_measurements_post_request' responses: "200": content: application/json: schema: items: - $ref: '#/components/schemas/MeasurementCollection' + additionalProperties: true + type: object type: array description: OK "400": @@ -8198,7 +8406,9 @@ paths: schema: $ref: '#/components/schemas/echo.HTTPError' 
description: Internal Server Error - summary: creates or updates one or more timeseries measurements + security: + - Bearer: [] + summary: creates one or more timeseries measurements tags: - measurement x-codegen-request-body-name: timeseries_measurement_collections @@ -8210,7 +8420,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/Unit' + $ref: '#/components/schemas/db.VUnit' type: array description: OK "400": @@ -8231,49 +8441,7 @@ components: message: type: object type: object - geojson.Geometry: - example: - geometries: - - null - - null - coordinates: "{}" - type: type - properties: - coordinates: - type: object - geometries: - items: - $ref: '#/components/schemas/geojson.Geometry' - type: array - type: - type: string - type: object - AggregatePlotConfigMeasurementsContourPlot: - example: - x: - - 0.8008281904610115 - - 0.8008281904610115 - "y": - - 6.027456183070403 - - 6.027456183070403 - z: - - 1.4658129805029452 - - 1.4658129805029452 - properties: - x: - items: - type: number - type: array - "y": - items: - type: number - type: array - z: - items: - type: number - type: array - type: object - Alert: + db.AlertGetRow: example: instruments: - instrument_name: instrument_name @@ -8284,22 +8452,22 @@ components: alert_config_id: alert_config_id project_id: project_id name: name + created_at: created_at id: id body: body - create_date: create_date project_name: project_name properties: alert_config_id: type: string body: type: string - create_date: + created_at: type: string id: type: string instruments: items: - $ref: '#/components/schemas/AlertConfigInstrument' + $ref: '#/components/schemas/db.InstrumentIDName' type: array name: type: string @@ -8310,103 +8478,44 @@ components: read: type: boolean type: object - AlertConfig: + db.AlertListForProfileRow: example: - updater_username: updater_username - alert_type_id: alert_type_id - creator_username: creator_username - remind_interval: remind_interval - body: body - project_name: 
project_name - alert_type: alert_type - update_date: update_date instruments: - instrument_name: instrument_name instrument_id: instrument_id - instrument_name: instrument_name instrument_id: instrument_id + read: true + alert_config_id: alert_config_id project_id: project_id - last_checked: last_checked - mute_consecutive_alerts: true - creator_id: creator_id - last_reminded: last_reminded name: name - updater_id: updater_id - schedule_interval: schedule_interval + created_at: created_at id: id - alert_email_subscriptions: - - user_type: user_type - id: id - email: email - username: username - - user_type: user_type - id: id - email: email - username: username - create_date: create_date - warning_interval: warning_interval - start_date: start_date + body: body + project_name: project_name properties: - alert_email_subscriptions: - items: - $ref: '#/components/schemas/EmailAutocompleteResult' - type: array - alert_type: - type: string - alert_type_id: + alert_config_id: type: string body: type: string - create_date: - type: string - creator_id: - type: string - creator_username: + created_at: type: string id: type: string instruments: items: - $ref: '#/components/schemas/AlertConfigInstrument' + $ref: '#/components/schemas/db.InstrumentIDName' type: array - last_checked: - type: string - last_reminded: - type: string - mute_consecutive_alerts: - type: boolean name: type: string project_id: type: string project_name: type: string - remind_interval: - type: string - schedule_interval: - type: string - start_date: - type: string - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - warning_interval: - type: string - type: object - AlertConfigInstrument: - example: - instrument_name: instrument_name - instrument_id: instrument_id - properties: - instrument_id: - type: string - instrument_name: - type: string + read: + type: boolean type: object - AlertSubscription: + db.AlertProfileSubscription: example: alert_config_id: 
alert_config_id profile_id: profile_id @@ -8425,7 +8534,7 @@ components: profile_id: type: string type: object - AwareParameter: + db.AwareParameterListRow: example: id: id unit_id: unit_id @@ -8441,1053 +8550,1683 @@ components: unit_id: type: string type: object - AwarePlatformParameterConfig: - example: - aware_parameters: - key: aware_parameters - instrument_id: instrument_id - aware_id: aware_id - properties: - aware_id: - type: string - aware_parameters: - additionalProperties: - type: string - type: object - instrument_id: - type: string - type: object - CalculatedTimeseries: + db.CollectionGroup: example: - formula_name: formula_name - formula: formula + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_at: created_at id: id - instrument_id: instrument_id - unit_id: unit_id + created_by: created_by + sort_order: 0 slug: slug - parameter_id: parameter_id properties: - formula: + created_at: type: string - formula_name: + created_by: type: string id: type: string - instrument_id: + name: type: string - parameter_id: + project_id: type: string slug: type: string - unit_id: + sort_order: + type: integer + updated_at: + type: string + updated_by: type: string type: object - CollectionGroup: + db.CollectionGroupDetailsTimeseries: example: - updater_username: updater_username - project_id: project_id - creator_username: creator_username - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date + instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: parameter + name: name + variable: "{}" + latest_value: 6.027456183070403 + id: id + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id slug: slug - update_date: update_date + parameter_id: parameter_id properties: - create_date: + id: type: string - creator_id: + instrument: type: string - creator_username: + instrument_id: 
type: string - id: + instrument_slug: type: string + is_computed: + type: boolean + latest_time: + type: string + latest_value: + type: number name: type: string - project_id: + parameter: type: string - slug: + parameter_id: type: string - update_date: + slug: type: string - updater_id: + sort_order: + type: integer + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: type: string - updater_username: + unit_id: type: string + variable: + type: object type: object - CollectionGroupDetails: + db.DataloggerEquivalencyTableField: example: - updater_username: updater_username - timeseries: - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - latest_value: 0.8008281904610115 - id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - unit_id: unit_id - slug: slug - parameter_id: parameter_id - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - latest_value: 0.8008281904610115 - id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - unit_id: unit_id - slug: slug - parameter_id: parameter_id - project_id: project_id - creator_username: creator_username - creator_id: creator_id - name: name - updater_id: updater_id + timeseries_id: timeseries_id id: id - create_date: create_date - slug: slug - update_date: update_date + display_name: 
display_name + instrument_id: instrument_id + field_name: field_name properties: - create_date: - type: string - creator_id: + display_name: type: string - creator_username: + field_name: type: string id: type: string - name: - type: string - project_id: - type: string - slug: - type: string - timeseries: - items: - $ref: '#/components/schemas/collectionGroupDetailsTimeseries' - type: array - update_date: - type: string - updater_id: + instrument_id: type: string - updater_username: + timeseries_id: type: string type: object - Datalogger: + db.DataloggerTableIDName: example: - updater_username: updater_username - creator_username: creator_username - model_id: model_id - update_date: update_date - tables: - - id: id - table_name: table_name - - id: id - table_name: table_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - model: model id: id - sn: sn - create_date: create_date - errors: - - errors - - errors - slug: slug + table_name: table_name properties: - create_date: + id: type: string - creator_id: + table_name: type: string - creator_username: + type: object + db.DomainGroupOpt: + properties: + description: type: string - errors: - items: - type: string - type: array id: type: string - model: - type: string - model_id: + value: type: string - name: + type: object + db.EmailAutocompleteListRow: + example: + user_type: user_type + id: id + email: email + username: "{}" + properties: + email: type: string - project_id: + id: type: string - slug: + user_type: type: string - sn: + username: + type: object + type: object + db.EmailAutocompleteResult: + example: + user_type: user_type + id: id + email: email + username: username + properties: + email: type: string - tables: - items: - $ref: '#/components/schemas/DataloggerTable' - type: array - update_date: + id: type: string - updater_id: + user_type: type: string - updater_username: + username: type: string type: object - DataloggerTable: + db.HomeGetRow: + example: + 
new_instruments_7d: 1 + project_count: 5 + instrument_group_count: 6 + new_measurements_2h: 5 + instrument_count: 0 + properties: + instrument_count: + type: integer + instrument_group_count: + type: integer + new_instruments_7d: + type: integer + new_measurements_2h: + type: integer + project_count: + type: integer + type: object + db.IDSlugName: example: + name: name id: id - table_name: table_name + slug: slug properties: id: type: string - table_name: + name: + type: string + slug: type: string type: object - DataloggerTablePreview: + db.InstrumentCreateBatchRow: example: - preview: - bytes: - - 0 - - 0 - status: 6 - datalogger_table_id: datalogger_table_id - update_date: update_date + id: id + slug: slug properties: - datalogger_table_id: + id: type: string - preview: - $ref: '#/components/schemas/pgtype.JSON' - update_date: + slug: type: string type: object - DataloggerWithKey: + db.InstrumentGroup: example: - updater_username: updater_username - creator_username: creator_username - model_id: model_id - update_date: update_date - tables: - - id: id - table_name: table_name - - id: id - table_name: table_name + deleted: true + updated_at: updated_at project_id: project_id - creator_id: creator_id name: name - updater_id: updater_id - model: model + updated_by: updated_by + created_at: created_at + description: description id: id - sn: sn - create_date: create_date - errors: - - errors - - errors - key: key + created_by: created_by slug: slug properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + deleted: + type: boolean + description: type: string - errors: - items: - type: string - type: array id: type: string - key: - type: string - model: - type: string - model_id: - type: string name: type: string project_id: type: string slug: type: string - sn: - type: string - tables: - items: - $ref: '#/components/schemas/DataloggerTable' - type: array - update_date: - type: string - updater_id: + 
updated_at: type: string - updater_username: + updated_by: type: string type: object - District: + db.InstrumentGroupUpdateRow: example: - office_id: office_id - agency: agency - initials: initials - division_initials: division_initials - division_name: division_name + updated_at: updated_at + project_id: project_id name: name + updated_by: updated_by + created_at: created_at + description: description id: id + created_by: created_by + slug: slug properties: - agency: + created_at: type: string - division_initials: + created_by: type: string - division_name: + description: type: string id: type: string - initials: - type: string name: type: string - office_id: + project_id: + type: string + slug: + type: string + updated_at: + type: string + updated_by: type: string type: object - DistrictRollup: + db.InstrumentIDName: example: - expected_total_submittals: 6 - office_id: office_id - alert_type_id: alert_type_id - month: month - project_id: project_id - red_submittals: 5 - green_submittals: 1 - yellow_submittals: 5 - actual_total_submittals: 0 - district_initials: district_initials - project_name: project_name + instrument_name: instrument_name + instrument_id: instrument_id properties: - actual_total_submittals: - type: integer - alert_type_id: + instrument_id: type: string - district_initials: + instrument_name: type: string - expected_total_submittals: - type: integer - green_submittals: - type: integer - month: + type: object + db.InstrumentNote: + example: + updated_at: updated_at + updated_by: updated_by + created_at: created_at + id: id + time: time + body: body + title: title + created_by: created_by + instrument_id: instrument_id + properties: + body: type: string - office_id: + created_at: type: string - project_id: + created_by: type: string - project_name: + id: type: string - red_submittals: - type: integer - yellow_submittals: + instrument_id: + type: string + time: + type: string + title: + type: string + updated_at: + type: string + updated_by: + 
type: string + type: object + db.IpiMeasurement: + example: + elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 + properties: + cum_dev: + type: number + elevation: + type: number + inc_dev: + type: number + segment_id: type: integer + temp: + type: number + tilt: + type: number type: object - Domain: + db.JobStatus: + enum: + - SUCCESS + - FAIL + - INIT + type: string + x-enum-varnames: + - JobStatusSUCCESS + - JobStatusFAIL + - JobStatusINIT + db.Measurement: example: - description: description - id: id - value: value - group: group + annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 properties: - description: + annotation: type: string - group: + error: type: string - id: + masked: + type: boolean + time: type: string + validated: + type: boolean value: - type: string + type: number type: object - DomainGroupOption: + db.MeasurementCollection: + example: + timeseries_id: timeseries_id + items: + - annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 + - annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 properties: - description: - type: string - id: + items: + items: + $ref: '#/components/schemas/db.Measurement' + type: array + timeseries_id: type: string - value: + type: object + db.MeasurementCollectionLean: + example: + timeseries_id: timeseries_id + items: + - null + - null + properties: + items: + items: + $ref: '#/components/schemas/db.MeasurementLean' + type: array + timeseries_id: type: string type: object - DomainMap: + db.MeasurementLean: additionalProperties: - items: - $ref: '#/components/schemas/DomainGroupOption' - type: array + type: number type: object - EmailAutocompleteResult: + db.PgTimezoneNamesListRow: example: - user_type: user_type - id: id - email: 
email - username: username + utc_offset: utc_offset + name: name + abbrev: abbrev + is_dst: true properties: - email: - type: string - id: + abbrev: type: string - user_type: + is_dst: + type: boolean + name: type: string - username: + utc_offset: type: string type: object - EquivalencyTable: + db.PlotConfigMeasurementListBullseyeRow: example: - datalogger_table_id: datalogger_table_id - datalogger_table_name: datalogger_table_name - rows: - - timeseries_id: timeseries_id - id: id - display_name: display_name - instrument_id: instrument_id - field_name: field_name - - timeseries_id: timeseries_id - id: id - display_name: display_name - instrument_id: instrument_id - field_name: field_name - datalogger_id: datalogger_id + x: "{}" + "y": "{}" + time: time properties: - datalogger_id: - type: string - datalogger_table_id: - type: string - datalogger_table_name: + time: type: string - rows: - items: - $ref: '#/components/schemas/EquivalencyTableRow' - type: array + x: + type: object + "y": + type: object type: object - EquivalencyTableRow: + db.PlotType: + enum: + - scatter-line + - profile + - contour + - bullseye + type: string + x-enum-varnames: + - PlotTypeScatterLine + - PlotTypeProfile + - PlotTypeContour + - PlotTypeBullseye + db.ProfileCreateRow: example: - timeseries_id: timeseries_id id: id display_name: display_name - instrument_id: instrument_id - field_name: field_name + email: email + username: username properties: display_name: type: string - field_name: + email: type: string id: type: string - instrument_id: - type: string - timeseries_id: + username: type: string type: object - Evaluation: + db.ProfileProjectRoleGetRow: example: - end_date: end_date - updater_username: updater_username - alert_config_id: alert_config_id - creator_username: creator_username - alert_config_name: alert_config_name - body: body - project_name: project_name - submittal_id: submittal_id - update_date: update_date - instruments: - - instrument_name: instrument_name - 
instrument_id: instrument_id - - instrument_name: instrument_name - instrument_id: instrument_id - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id + role: role + role_id: role_id + profile_id: profile_id id: id - create_date: create_date - start_date: start_date + email: email + username: username properties: - alert_config_id: - type: string - alert_config_name: - type: string - body: - type: string - create_date: - type: string - creator_id: - type: string - creator_username: - type: string - end_date: + email: type: string id: type: string - instruments: - items: - $ref: '#/components/schemas/EvaluationInstrument' - type: array - name: + profile_id: type: string - project_id: + role: type: string - project_name: + role_id: type: string - start_date: + username: type: string - submittal_id: + type: object + db.ProfileProjectRoleListForProjectRow: + example: + role: role + role_id: role_id + profile_id: profile_id + id: id + email: email + username: username + properties: + email: type: string - update_date: + id: type: string - updater_id: + profile_id: type: string - updater_username: + role: type: string - type: object - EvaluationInstrument: - example: - instrument_name: instrument_name - instrument_id: instrument_id - properties: - instrument_id: + role_id: type: string - instrument_name: + username: type: string type: object - Geometry: + db.ProjectCreateBatchRow: example: - geometries: - - geometries: - - null - - null - coordinates: "{}" - type: type - - geometries: - - null - - null - coordinates: "{}" - type: type - coordinates: "{}" - type: type + id: id + slug: slug properties: - coordinates: - type: object - geometries: - items: - $ref: '#/components/schemas/geojson.Geometry' - type: array - type: + id: type: string - type: object - Heartbeat: - example: - time: time - properties: - time: + slug: type: string type: object - Home: + db.ReportConfigGlobalOverrides: example: - new_instruments_7d: 1 - project_count: 5 
- instrument_group_count: 6 - new_measurements_2h: 5 - instrument_count: 0 + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true properties: - instrument_count: - type: integer - instrument_group_count: - type: integer - new_instruments_7d: - type: integer - new_measurements_2h: - type: integer - project_count: - type: integer + date_range: + $ref: '#/components/schemas/db.TextOption' + show_masked: + $ref: '#/components/schemas/db.ToggleOption' + show_nonvalidated: + $ref: '#/components/schemas/db.ToggleOption' type: object - IDSlugName: + db.ReportDownloadJob: example: - name: name + progress_updated_at: progress_updated_at + file_key: file_key + report_config_id: report_config_id + created_at: created_at + progress: 0 + file_expiry: file_expiry id: id - slug: slug + created_by: created_by + status: SUCCESS properties: - id: + created_at: type: string - name: + created_by: type: string - slug: + file_expiry: type: string - type: object - InclinometerMeasurement: - example: - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - properties: - create_date: + file_key: type: string - creator: + id: type: string - time: + progress: + type: integer + progress_updated_at: type: string - values: - items: - type: integer - type: array - type: object - InclinometerMeasurementCollection: - example: - timeseries_id: timeseries_id - inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - properties: - inclinometers: - items: - $ref: '#/components/schemas/InclinometerMeasurement' - type: array - timeseries_id: + report_config_id: type: string + status: + $ref: '#/components/schemas/db.JobStatus' type: object - InclinometerMeasurementCollectionCollection: + db.SaaMeasurement: example: - items: - - timeseries_id: timeseries_id - 
inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - timeseries_id: timeseries_id - inclinometers: - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date - - creator: creator - values: - - 0 - - 0 - time: time - create_date: create_date + elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 properties: - items: - items: - $ref: '#/components/schemas/InclinometerMeasurementCollection' - type: array + elevation: + type: number + segment_id: + type: integer + temp: + type: number + temp_cum_dev: + type: number + temp_increment: + type: number + x: + type: number + x_cum_dev: + type: number + x_increment: + type: number + "y": + type: number + y_cum_dev: + type: number + y_increment: + type: number + z: + type: number + z_cum_dev: + type: number + z_increment: + type: number type: object - InclinometerMeasurementCollectionLean: + db.TextOption: example: - timeseries_id: timeseries_id - items: - - null - - null + value: value + enabled: true properties: - items: - items: - $ref: '#/components/schemas/InclinometerMeasurementLean' - type: array - timeseries_id: + enabled: + type: boolean + value: type: string type: object - InclinometerMeasurementLean: - additionalProperties: - items: - type: integer - type: array - type: object - Instrument: + db.TimeseriesComputedListForInstrumentRow: example: - has_cwms: true - projects: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - alert_configs: - - alert_configs - - alert_configs - icon: icon - 
type: type - aware_id: aware_id - status_id: status_id - opts: - key: "" - station: 6 - constants: - - constants - - constants + formula_name: formula_name + formula: formula id: id - status_time: status_time - create_date: create_date + instrument_id: instrument_id + unit_id: unit_id slug: slug - updater_username: updater_username - offset: 0 - creator_username: creator_username - type_id: type_id - show_cwms_tab: true - usgs_id: usgs_id - groups: - - groups - - groups - update_date: update_date - creator_id: creator_id - name: name - updater_id: updater_id - geometry: - geometries: - - geometries: - - null - - null - coordinates: "{}" - type: type - - geometries: - - null - - null - coordinates: "{}" - type: type - coordinates: "{}" - type: type - nid_id: nid_id - status: status + parameter_id: parameter_id properties: - alert_configs: - items: - type: string - type: array - aware_id: - type: string - constants: - items: - type: string - type: array - create_date: - type: string - creator_id: - type: string - creator_username: + formula: type: string - geometry: - $ref: '#/components/schemas/Geometry' - groups: - items: - type: string - type: array - has_cwms: - type: boolean - icon: + formula_name: type: string id: type: string - name: + instrument_id: type: string - nid_id: + parameter_id: type: string - offset: - type: integer - opts: - additionalProperties: true - type: object - projects: - items: - $ref: '#/components/schemas/IDSlugName' - type: array - show_cwms_tab: - type: boolean slug: type: string - station: - type: integer - status: - type: string - status_id: - type: string - status_time: - type: string - type: - type: string - type_id: - type: string - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - usgs_id: + unit_id: type: string type: object - InstrumentCount: - example: - instrument_count: 0 - properties: - instrument_count: - type: integer - type: object - InstrumentGroup: + 
db.TimeseriesCreateBatchRow: example: - updater_username: updater_username - creator_username: creator_username - description: description - instrument_count: 0 - update_date: update_date - project_id: project_id - creator_id: creator_id name: name - timeseries_count: 6 - updater_id: updater_id id: id - create_date: create_date + type: standard + instrument_id: instrument_id + unit_id: unit_id slug: slug + parameter_id: parameter_id properties: - create_date: - type: string - creator_id: - type: string - creator_username: - type: string - description: - type: string id: type: string - instrument_count: - type: integer + instrument_id: + type: string name: type: string - project_id: + parameter_id: type: string slug: type: string - timeseries_count: - type: integer - update_date: + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit_id: + type: string + type: object + db.TimeseriesType: + enum: + - standard + - constant + - computed + - cwms + type: string + x-enum-varnames: + - TimeseriesTypeStandard + - TimeseriesTypeConstant + - TimeseriesTypeComputed + - TimeseriesTypeCwms + db.ToggleOption: + example: + value: true + enabled: true + properties: + enabled: + type: boolean + value: + type: boolean + type: object + db.UploaderConfigMapping: + example: + timeseries_id: timeseries_id + uploader_config_id: uploader_config_id + field_name: field_name + properties: + field_name: type: string - updater_id: + timeseries_id: type: string - updater_username: + uploader_config_id: type: string type: object - InstrumentNote: + db.UploaderConfigType: + enum: + - csv + - dux + - toa5 + type: string + x-enum-varnames: + - UploaderConfigTypeCsv + - UploaderConfigTypeDux + - UploaderConfigTypeToa5 + db.VAlert: example: - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + 
instrument_id: instrument_id + alert_config_id: alert_config_id + project_id: project_id + name: name + created_at: created_at id: id - time: time body: body - create_date: create_date - title: title - instrument_id: instrument_id - update_date: update_date + project_name: project_name properties: - body: - type: string - create_date: + alert_config_id: type: string - creator_id: + body: type: string - creator_username: + created_at: type: string id: type: string - instrument_id: - type: string - time: + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + name: type: string - title: + project_id: type: string - update_date: + project_name: type: string - updater_id: + type: object + db.VAlertConfig: + example: + alert_type_id: alert_type_id + created_at: created_at + remind_interval: remind_interval + create_next_submittal_from: create_next_submittal_from + body: body + project_name: project_name + created_by: created_by + alert_type: alert_type + last_checked_at: last_checked_at + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + updated_at: updated_at + project_id: project_id + mute_consecutive_alerts: true + name: name + updated_by: updated_by + schedule_interval: schedule_interval + started_at: started_at + created_by_username: created_by_username + id: id + alert_email_subscriptions: + - user_type: user_type + id: id + email: email + username: username + - user_type: user_type + id: id + email: email + username: username + last_reminded_at: last_reminded_at + warning_interval: warning_interval + properties: + alert_email_subscriptions: + items: + $ref: '#/components/schemas/db.EmailAutocompleteResult' + type: array + alert_type: type: string - updater_username: + alert_type_id: + type: string + body: + type: string + create_next_submittal_from: + type: string + created_at: + type: 
string + created_by: + type: string + created_by_username: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + last_checked_at: + type: string + last_reminded_at: + type: string + mute_consecutive_alerts: + type: boolean + name: + type: string + project_id: + type: string + project_name: + type: string + remind_interval: + type: string + schedule_interval: + type: string + started_at: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + warning_interval: type: string type: object - InstrumentNoteCollection: + db.VCollectionGroupDetails: example: - items: - - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id - id: id - time: time - body: body - create_date: create_date - title: title + timeseries: + - instrument: instrument + type: standard instrument_id: instrument_id - update_date: update_date - - updater_username: updater_username - creator_username: creator_username - creator_id: creator_id - updater_id: updater_id + unit: unit + parameter: parameter + name: name + variable: "{}" + latest_value: 6.027456183070403 id: id - time: time - body: body - create_date: create_date - title: title + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id + slug: slug + parameter_id: parameter_id + - instrument: instrument + type: standard instrument_id: instrument_id - update_date: update_date + unit: unit + parameter: parameter + name: name + variable: "{}" + latest_value: 6.027456183070403 + id: id + instrument_slug: instrument_slug + is_computed: true + latest_time: latest_time + sort_order: 1 + unit_id: unit_id + slug: slug + parameter_id: parameter_id + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_at: created_at + id: id + created_by: created_by + 
sort_order: 0 + slug: slug properties: - items: + created_at: + type: string + created_by: + type: string + id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + sort_order: + type: integer + timeseries: items: - $ref: '#/components/schemas/InstrumentNote' + $ref: '#/components/schemas/db.CollectionGroupDetailsTimeseries' type: array + updated_at: + type: string + updated_by: + type: string type: object - InstrumentProjectAssignments: + db.VDatalogger: example: - project_ids: - - project_ids - - project_ids + created_at: created_at + model_id: model_id + created_by: created_by + updated_by_username: updated_by_username + tables: + - id: id + table_name: table_name + - id: id + table_name: table_name + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + model: model + id: id + sn: sn + errors: + - errors + - errors + slug: slug properties: - project_ids: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + errors: items: type: string type: array + id: + type: string + model: + type: string + model_id: + type: string + name: + type: string + project_id: + type: string + slug: + type: string + sn: + type: string + tables: + items: + $ref: '#/components/schemas/db.DataloggerTableIDName' + type: array + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string type: object - InstrumentStatus: + db.VDataloggerEquivalencyTable: example: - status_id: status_id + datalogger_table_id: datalogger_table_id + datalogger_table_name: datalogger_table_name + fields: + - timeseries_id: timeseries_id + id: id + display_name: display_name + instrument_id: instrument_id + field_name: field_name + - timeseries_id: timeseries_id + id: id + display_name: display_name + instrument_id: instrument_id + field_name: field_name + datalogger_id: datalogger_id + properties: + datalogger_id: + 
type: string + datalogger_table_id: + type: string + datalogger_table_name: + type: string + fields: + items: + $ref: '#/components/schemas/db.DataloggerEquivalencyTableField' + type: array + type: object + db.VDataloggerPreview: + example: + preview: + - 0 + - 0 + updated_at: updated_at + datalogger_table_id: datalogger_table_id + properties: + datalogger_table_id: + type: string + preview: + items: + type: integer + type: array + updated_at: + type: string + type: object + db.VDistrict: + example: + office_id: office_id + agency: agency + initials: initials + division_initials: division_initials + division_name: division_name + name: name id: id - time: time - status: status properties: + agency: + type: string + division_initials: + type: string + division_name: + type: string + id: + type: string + initials: + type: string + name: + type: string + office_id: + type: string + type: object + db.VDistrictRollup: + example: + expected_total_submittals: 6 + office_id: office_id + alert_type_id: alert_type_id + month: month + project_id: project_id + red_submittals: 5 + green_submittals: 1 + yellow_submittals: 5 + actual_total_submittals: 0 + district_initials: district_initials + project_name: project_name + properties: + actual_total_submittals: + type: integer + alert_type_id: + type: string + district_initials: + type: string + expected_total_submittals: + type: integer + green_submittals: + type: integer + month: + type: string + office_id: + type: string + project_id: + type: string + project_name: + type: string + red_submittals: + type: integer + yellow_submittals: + type: integer + type: object + db.VDomain: + properties: + description: + type: string + group: + type: string id: type: string - status: + value: + type: string + type: object + db.VEvaluation: + example: + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + body: body + project_name: project_name + created_by: created_by + submittal_id: 
submittal_id + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name + instrument_id: instrument_id + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + started_at: started_at + created_by_username: created_by_username + id: id + ended_at: ended_at + properties: + alert_config_id: + type: string + alert_config_name: + type: string + body: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + ended_at: + type: string + id: + type: string + instruments: + items: + $ref: '#/components/schemas/db.InstrumentIDName' + type: array + name: + type: string + project_id: + type: string + project_name: + type: string + started_at: + type: string + submittal_id: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VInclMeasurement: + example: + time: time + instrument_id: instrument_id + measurements: "{}" + properties: + instrument_id: + type: string + measurements: + type: object + time: + type: string + type: object + db.VInclSegment: + example: + depth_timeseries_id: depth_timeseries_id + b180_timeseries_id: b180_timeseries_id + a180_timeseries_id: a180_timeseries_id + id: 0 + instrument_id: instrument_id + a0_timeseries_id: a0_timeseries_id + b0_timeseries_id: b0_timeseries_id + properties: + a0_timeseries_id: + type: string + a180_timeseries_id: + type: string + b0_timeseries_id: + type: string + b180_timeseries_id: + type: string + depth_timeseries_id: + type: string + id: + type: integer + instrument_id: + type: string + type: object + db.VInstrument: + example: + has_cwms: true + projects: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + alert_configs: + - alert_configs + - alert_configs + icon: icon + created_at: created_at + type: type + status_id: status_id + 
opts: "{}" + updated_at: updated_at + station: 1 + constants: + - constants + - constants + id: id + status_time: status_time + slug: slug + offset: 6 + type_id: type_id + show_cwms_tab: true + usgs_id: usgs_id + groups: + - groups + - groups + created_by: created_by + name: name + updated_by: updated_by + geometry: + - 0 + - 0 + nid_id: nid_id + telemetry: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + status: status + properties: + alert_configs: + items: + type: string + type: array + constants: + items: + type: string + type: array + created_at: + type: string + created_by: + type: string + geometry: + items: + type: integer + type: array + groups: + items: + type: string + type: array + has_cwms: + type: boolean + icon: + type: string + id: + type: string + name: + type: string + nid_id: + type: string + offset: + type: integer + opts: + type: object + projects: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + show_cwms_tab: + type: boolean + slug: + type: string + station: + type: integer + status: + type: string + status_id: + type: string + status_time: + type: string + telemetry: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + type: + type: string + type_id: + type: string + updated_at: + type: string + updated_by: + type: string + usgs_id: + type: string + type: object + db.VInstrumentGroup: + example: + updated_at: updated_at + project_id: project_id + name: name + timeseries_count: "{}" + updated_by: updated_by + created_at: created_at + description: description + id: id + created_by: created_by + instrument_count: 0 + slug: slug + properties: + created_at: + type: string + created_by: + type: string + description: + type: string + id: + type: string + instrument_count: + type: integer + name: + type: string + project_id: + type: string + slug: + type: string + timeseries_count: + type: object + updated_at: + type: string + updated_by: + type: string + type: object + 
db.VInstrumentStatus: + example: + status_id: status_id + id: id + time: time + instrument_id: instrument_id + status: status + properties: + id: + type: string + instrument_id: + type: string + status: + type: string + status_id: + type: string + time: + type: string + type: object + db.VIpiMeasurement: + example: + time: time + instrument_id: instrument_id + measurements: + - elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 + - elevation: 6.027456183070403 + temp: 5.637376656633329 + inc_dev: 1.4658129805029452 + tilt: 2.3021358869347655 + segment_id: 5 + cum_dev: 0.8008281904610115 + properties: + instrument_id: + type: string + measurements: + items: + $ref: '#/components/schemas/db.IpiMeasurement' + type: array + time: + type: string + type: object + db.VIpiSegment: + example: + length: 6.027456183070403 + tilt_timeseries_id: tilt_timeseries_id + id: 0 + inc_dev_timeseries_id: inc_dev_timeseries_id + instrument_id: instrument_id + length_timeseries_id: length_timeseries_id + properties: + id: + type: integer + inc_dev_timeseries_id: + type: string + instrument_id: + type: string + length: + type: number + length_timeseries_id: + type: string + tilt_timeseries_id: + type: string + type: object + db.VPlotConfiguration: + example: + date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug + properties: + auto_range: + type: boolean + created_at: + type: string + created_by: + type: string + date_range: + type: string + display: + type: object + id: + type: string + name: + type: string + plot_type: + 
$ref: '#/components/schemas/db.PlotType' + project_id: + type: string + report_configs: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean + slug: + type: string + threshold: + type: integer + updated_at: + type: string + updated_by: + type: string + type: object + db.VProfile: + example: + is_admin: true + roles: + - roles + - roles + tokens: + - token_id: token_id + issued: issued + - token_id: token_id + issued: issued + id: id + display_name: display_name + edipi: 0 + email: email + username: username + properties: + display_name: + type: string + edipi: + type: integer + email: + type: string + id: + type: string + is_admin: + type: boolean + roles: + items: + type: string + type: array + tokens: + items: + $ref: '#/components/schemas/db.VProfileToken' + type: array + username: + type: string + type: object + db.VProfileToken: + example: + token_id: token_id + issued: issued + properties: + issued: + type: string + token_id: + type: string + type: object + db.VProject: + example: + image: "{}" + federal_id: federal_id + created_at: created_at + created_by: created_by + instrument_count: 0 + office_id: office_id + updated_by_username: updated_by_username + instrument_group_count: 6 + updated_at: updated_at + name: name + updated_by: updated_by + created_by_username: created_by_username + district_id: district_id + id: id + slug: slug + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + district_id: + type: string + federal_id: + type: string + id: + type: string + image: + type: object + instrument_count: + type: integer + instrument_group_count: + type: integer + name: + type: string + office_id: + type: string + slug: + type: string + updated_at: + type: string + updated_by: + type: string + updated_by_username: + type: string + type: object + db.VReportConfig: + example: + 
global_overrides: + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true + created_at: created_at + description: description + project_name: project_name + created_by: created_by + updated_by_username: updated_by_username + district_name: district_name + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + id: id + plot_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + slug: slug + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + district_name: + type: string + global_overrides: + $ref: '#/components/schemas/db.ReportConfigGlobalOverrides' + id: + type: string + name: + type: string + plot_configs: + items: + $ref: '#/components/schemas/db.IDSlugName' + type: array + project_id: + type: string + project_name: + type: string + slug: + type: string + updated_at: type: string - status_id: + updated_by: type: string - time: + updated_by_username: type: string type: object - InstrumentStatusCollection: - example: - items: - - status_id: status_id - id: id - time: time - status: status - - status_id: status_id - id: id - time: time - status: status - properties: - items: - items: - $ref: '#/components/schemas/InstrumentStatus' - type: array - type: object - InstrumentsValidation: - example: - is_valid: true - errors: - - errors - - errors - properties: - errors: - items: - type: string - type: array - is_valid: - type: boolean - type: object - IpiMeasurements: + db.VSaaMeasurement: example: time: time + instrument_id: instrument_id measurements: - - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 1.4658129805029452 - tilt: 2.3021358869347655 - segment_id: 5 - cum_dev: 0.8008281904610115 - - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 
1.4658129805029452 - tilt: 2.3021358869347655 - segment_id: 5 - cum_dev: 0.8008281904610115 + - elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 + - elevation: 0.8008281904610115 + temp: 1.4658129805029452 + z_cum_dev: 1.2315135367772556 + y_increment: 4.145608029883936 + x_cum_dev: 7.061401241503109 + temp_increment: 5.637376656633329 + z_increment: 1.0246457001441578 + y_cum_dev: 2.027123023002322 + x_increment: 9.301444243932576 + x: 2.3021358869347655 + "y": 3.616076749251911 + z: 7.386281948385884 + segment_id: 6 + temp_cum_dev: 5.962133916683182 properties: + instrument_id: + type: string measurements: items: - $ref: '#/components/schemas/IpiSegmentMeasurement' + $ref: '#/components/schemas/db.SaaMeasurement' type: array time: type: string type: object - IpiSegment: + db.VSaaSegment: example: + z_timeseries_id: z_timeseries_id temp_timeseries_id: temp_timeseries_id + y_timeseries_id: y_timeseries_id + x_timeseries_id: x_timeseries_id length: 6.027456183070403 - tilt_timeseries_id: tilt_timeseries_id id: 0 - inc_dev_timeseries_id: inc_dev_timeseries_id instrument_id: instrument_id length_timeseries_id: length_timeseries_id properties: id: type: integer - inc_dev_timeseries_id: - type: string instrument_id: type: string length: @@ -9496,1307 +10235,1240 @@ components: type: string temp_timeseries_id: type: string - tilt_timeseries_id: + x_timeseries_id: + type: string + y_timeseries_id: + type: string + z_timeseries_id: type: string type: object - IpiSegmentMeasurement: + db.VSubmittal: example: - elevation: 6.027456183070403 - temp: 5.637376656633329 - inc_dev: 1.4658129805029452 - tilt: 2.3021358869347655 
- segment_id: 5 - cum_dev: 0.8008281904610115 + alert_type_id: alert_type_id + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + submittal_status_id: submittal_status_id + submittal_status_name: submittal_status_name + warning_sent: true + completed_at: completed_at + project_id: project_id + alert_type_name: alert_type_name + marked_as_missing: true + due_at: due_at + id: id properties: - cum_dev: - type: number - elevation: - type: number - inc_dev: - type: number - segment_id: - type: integer - temp: - type: number - tilt: - type: number + alert_config_id: + type: string + alert_config_name: + type: string + alert_type_id: + type: string + alert_type_name: + type: string + completed_at: + type: string + created_at: + type: string + due_at: + type: string + id: + type: string + marked_as_missing: + type: boolean + project_id: + type: string + submittal_status_id: + type: string + submittal_status_name: + type: string + warning_sent: + type: boolean type: object - Measurement: + db.VTimeseries: example: - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 + instrument: instrument + type: standard + instrument_id: instrument_id + unit: unit + parameter: parameter + name: name + variable: "{}" + id: id + instrument_slug: instrument_slug + is_computed: true + unit_id: unit_id + slug: slug + parameter_id: parameter_id properties: - annotation: + id: type: string - error: + instrument: type: string - masked: - type: boolean - time: + instrument_id: type: string - validated: + instrument_slug: + type: string + is_computed: type: boolean - value: - type: number + name: + type: string + parameter: + type: string + parameter_id: + type: string + slug: + type: string + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: + type: string + unit_id: + type: string + variable: + type: object type: object - MeasurementCollection: + db.VTimeseriesCwms: example: 
- timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 + cwms_office_id: cwms_office_id + instrument: instrument + cwms_extent_earliest_time: cwms_extent_earliest_time + type: standard + cwms_timeseries_id: cwms_timeseries_id + instrument_id: instrument_id + unit: unit + parameter: parameter + cwms_extent_latest_time: cwms_extent_latest_time + name: name + variable: "{}" + id: id + instrument_slug: instrument_slug + is_computed: true + unit_id: unit_id + slug: slug + parameter_id: parameter_id properties: - items: - items: - $ref: '#/components/schemas/Measurement' - type: array - timeseries_id: + cwms_extent_earliest_time: + type: string + cwms_extent_latest_time: + type: string + cwms_office_id: + type: string + cwms_timeseries_id: + type: string + id: + type: string + instrument: + type: string + instrument_id: + type: string + instrument_slug: + type: string + is_computed: + type: boolean + name: + type: string + parameter: + type: string + parameter_id: + type: string + slug: + type: string + type: + $ref: '#/components/schemas/db.TimeseriesType' + unit: + type: string + unit_id: type: string + variable: + type: object type: object - MeasurementCollectionLean: + db.VUnit: example: - timeseries_id: timeseries_id - items: - - null - - null + measure: measure + unit_family_id: unit_family_id + name: name + unit_family: unit_family + id: id + abbreviation: abbreviation + measure_id: measure_id properties: - items: - items: - $ref: '#/components/schemas/MeasurementLean' - type: array - timeseries_id: + abbreviation: + type: string + id: + type: string + measure: + type: string + measure_id: + type: string + name: + type: string + unit_family: + type: string + unit_family_id: type: string type: object - MeasurementLean: - additionalProperties: - type: 
number - type: object - Opts: - additionalProperties: true - type: object - PlotConfig: + db.VUploaderConfig: example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - key: "" - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true + validated_field: validated_field + created_at: created_at + description: description + row_offset: 6 + comment_field: comment_field + type: csv + created_by: created_by + updated_by_username: updated_by_username + time_field: time_field + masked_field: masked_field + tz_name: tz_name + updated_at: updated_at + comment_field_enabled: true project_id: project_id - creator_id: creator_id + column_offset: 0 name: name - updater_id: updater_id + updated_by: updated_by + masked_field_enabled: true + created_by_username: created_by_username id: id - create_date: create_date - plot_type: plot_type + validated_field_enabled: true slug: slug properties: - auto_range: + column_offset: + type: integer + comment_field: + type: string + comment_field_enabled: type: boolean - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string - date_range: + description: type: string - display: - additionalProperties: true - type: object id: type: string - name: + masked_field: type: string - plot_type: + masked_field_enabled: + type: boolean + name: type: string project_id: type: string - report_configs: - items: - $ref: '#/components/schemas/IDSlugName' - type: array - show_comments: - type: boolean - show_masked: - type: boolean - show_nonvalidated: - type: boolean + row_offset: + type: integer slug: type: string - threshold: - type: integer - update_date: + time_field: + type: string + type: + $ref: '#/components/schemas/db.UploaderConfigType' + tz_name: 
+ type: string + updated_at: type: string - updater_id: + updated_by: type: string - updater_username: + updated_by_username: type: string + validated_field: + type: string + validated_field_enabled: + type: boolean type: object - PlotConfigBullseyePlot: - example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - y_axis_timeseries_id: y_axis_timeseries_id - x_axis_timeseries_id: x_axis_timeseries_id - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug + dto.AlertConfig: properties: - auto_range: - type: boolean - create_date: - type: string - creator_id: + alert_email_subscriptions: + items: + $ref: '#/components/schemas/dto.EmailAutocompleteResult' + type: array + alert_type: type: string - creator_username: + alert_type_id: type: string - date_range: + body: type: string - display: - $ref: '#/components/schemas/PlotConfigBullseyePlotDisplay' - id: + created_at: type: string - name: + created_by: type: string - plot_type: + created_by_username: type: string - project_id: + id: type: string - report_configs: + instruments: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.AlertConfigInstrument' type: array - show_comments: - type: boolean - show_masked: - type: boolean - show_nonvalidated: + last_checked: + type: string + last_reminded: + type: string + mute_consecutive_alerts: type: boolean - slug: + name: type: string - threshold: - type: integer - update_date: + project_id: + type: string + project_name: + type: string + remind_interval: + type: string + schedule_interval: + type: string + started_at: + type: string + updated_by: type: string - 
updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: + type: string + warning_interval: type: string type: object - PlotConfigBullseyePlotDisplay: - example: - y_axis_timeseries_id: y_axis_timeseries_id - x_axis_timeseries_id: x_axis_timeseries_id + dto.AlertConfigInstrument: properties: - x_axis_timeseries_id: + instrument_id: type: string - y_axis_timeseries_id: + instrument_name: + type: string + type: object + dto.AlertSubscription: + properties: + alert_config_id: + type: string + id: + type: string + mute_notify: + type: boolean + mute_ui: + type: boolean + profile_id: type: string type: object - PlotConfigContourPlot: + dto.CalculatedTimeseries: example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - contour_smoothing: true - gradient_smoothing: true - locf_backfill: locf_backfill - timeseries_ids: - - timeseries_ids - - timeseries_ids - show_labels: true - time: time - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id + formula_name: formula_name + formula: formula id: id - create_date: create_date - plot_type: plot_type + instrument_id: instrument_id + unit_id: unit_id slug: slug + parameter_id: parameter_id properties: - auto_range: - type: boolean - create_date: + formula: type: string - creator_id: + formula_name: type: string - creator_username: + id: type: string - date_range: + instrument_id: + type: string + parameter_id: + type: string + slug: + type: string + unit_id: + type: string + type: object + dto.CollectionGroup: + properties: + created_at: + type: string + created_by: + type: string + created_by_username: type: string - display: - $ref: '#/components/schemas/PlotConfigContourPlotDisplay' id: 
type: string name: type: string - plot_type: - type: string project_id: type: string - report_configs: + slug: + type: string + sort_order: + type: integer + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + type: object + dto.Datalogger: + properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + errors: items: - $ref: '#/components/schemas/IDSlugName' + type: string type: array - show_comments: - type: boolean - show_masked: - type: boolean - show_nonvalidated: - type: boolean + id: + type: string + model: + type: string + model_id: + type: string + name: + type: string + project_id: + type: string slug: type: string - threshold: - type: integer - update_date: + sn: + type: string + tables: + items: + $ref: '#/components/schemas/dto.DataloggerTable' + type: array + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - PlotConfigContourPlotDisplay: - example: - contour_smoothing: true - gradient_smoothing: true - locf_backfill: locf_backfill - timeseries_ids: - - timeseries_ids - - timeseries_ids - show_labels: true - time: time + dto.DataloggerTable: properties: - contour_smoothing: - type: boolean - gradient_smoothing: - type: boolean - locf_backfill: + id: type: string - show_labels: - type: boolean - time: + table_name: + type: string + type: object + dto.EmailAutocompleteResult: + properties: + email: + type: string + id: + type: string + user_type: + type: string + username: + type: string + type: object + dto.EquivalencyTable: + properties: + datalogger_id: + type: string + datalogger_table_id: type: string - timeseries_ids: + datalogger_table_name: + type: string + rows: items: - type: string + $ref: '#/components/schemas/dto.EquivalencyTableRow' type: array type: object - PlotConfigMeasurementBullseyePlot: - example: - x: 0.8008281904610115 - "y": 6.027456183070403 - time: 
time + dto.EquivalencyTableRow: properties: - time: + display_name: + type: string + field_name: + type: string + id: + type: string + instrument_id: + type: string + timeseries_id: type: string - x: - type: number - "y": - type: number type: object - PlotConfigProfilePlot: + dto.Evaluation: example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - instrument_type: instrument_type + alert_config_id: alert_config_id + created_at: created_at + alert_config_name: alert_config_name + body: body + project_name: project_name + created_by: created_by + submittal_id: submittal_id + updated_by_username: updated_by_username + instruments: + - instrument_name: instrument_name + instrument_id: instrument_id + - instrument_name: instrument_name instrument_id: instrument_id - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 0 - update_date: update_date - show_nonvalidated: true project_id: project_id - creator_id: creator_id name: name - updater_id: updater_id + updated_by: updated_by + started_at: started_at + updatedd_at: updatedd_at + created_by_username: created_by_username id: id - create_date: create_date - plot_type: plot_type - slug: slug + ended_at: ended_at properties: - auto_range: - type: boolean - create_date: + alert_config_id: type: string - creator_id: + alert_config_name: type: string - creator_username: + body: type: string - date_range: + created_at: type: string - display: - $ref: '#/components/schemas/PlotConfigProfilePlotDisplay' - id: + created_by: type: string - name: + created_by_username: type: string - plot_type: + ended_at: type: string - project_id: + id: type: string - report_configs: + instruments: items: - $ref: '#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.EvaluationInstrument' type: array - show_comments: - type: boolean - show_masked: - type: 
boolean - show_nonvalidated: - type: boolean - slug: + name: type: string - threshold: - type: integer - update_date: + project_id: + type: string + project_name: type: string - updater_id: + started_at: type: string - updater_username: + submittal_id: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: type: string type: object - PlotConfigProfilePlotDisplay: + dto.EvaluationInstrument: example: - instrument_type: instrument_type + instrument_name: instrument_name instrument_id: instrument_id properties: instrument_id: type: string - instrument_type: + instrument_name: type: string type: object - PlotConfigScatterLineCustomShape: + dto.IDSlugName: example: - color: color - data_point: 0.8008281904610115 name: name - plot_configuration_id: plot_configuration_id - enabled: true + id: id + slug: slug properties: - color: + id: type: string - data_point: - type: number - enabled: - type: boolean name: type: string - plot_configuration_id: + slug: type: string type: object - PlotConfigScatterLineDisplay: - example: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - properties: - layout: - $ref: 
'#/components/schemas/PlotConfigScatterLineLayout' - traces: - items: - $ref: '#/components/schemas/PlotConfigScatterLineTimeseriesTrace' - type: array - type: object - PlotConfigScatterLineLayout: + dto.InclSegment: example: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title + depth_timeseries_id: depth_timeseries_id + b180_timeseries_id: b180_timeseries_id + a180_timeseries_id: a180_timeseries_id + id: 0 + instrument_id: instrument_id + a0_timeseries_id: a0_timeseries_id + b0_timeseries_id: b0_timeseries_id properties: - custom_shapes: - items: - $ref: '#/components/schemas/PlotConfigScatterLineCustomShape' - type: array - y2_axis_title: + a0_timeseries_id: type: string - y_axis_title: + a180_timeseries_id: + type: string + b0_timeseries_id: + type: string + b180_timeseries_id: + type: string + depth_timeseries_id: + type: string + id: + type: integer + instrument_id: type: string type: object - PlotConfigScatterLinePlot: + dto.Instrument: example: - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - 
color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: + has_cwms: true + projects: - name: name id: id slug: slug - name: name id: id slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id + alert_configs: + - alert_configs + - alert_configs + icon: icon + created_at: created_at + type: type + aware_id: aware_id + updated_by_username: updated_by_username + status_id: status_id + opts: + key: "" + station: 1 + created_by_username: created_by_username + constants: + - constants + - constants id: id - create_date: create_date - plot_type: plot_type + status_time: status_time slug: slug + offset: 6 + type_id: type_id + show_cwms_tab: true + usgs_id: usgs_id + groups: + - groups + - groups + created_by: created_by + name: name + updated_by: updated_by + updatedd_at: updatedd_at + geometry: + - 0 + - 0 + nid_id: nid_id + status: status properties: - auto_range: - type: boolean - create_date: + alert_configs: + items: + type: string + type: array + aware_id: type: string - creator_id: + constants: + items: + type: string + type: array + created_at: type: string - creator_username: + created_by: type: string - date_range: + created_by_username: + type: string + geometry: + items: + type: integer + type: array + groups: + items: + type: string + type: array + has_cwms: + type: boolean + icon: type: string - display: - $ref: '#/components/schemas/PlotConfigScatterLineDisplay' id: type: string name: type: string - plot_type: - type: string - project_id: + nid_id: type: string - report_configs: + offset: + type: integer + opts: + additionalProperties: true + type: object + projects: items: - $ref: 
'#/components/schemas/IDSlugName' + $ref: '#/components/schemas/dto.IDSlugName' type: array - show_comments: - type: boolean - show_masked: - type: boolean - show_nonvalidated: + show_cwms_tab: type: boolean slug: type: string - threshold: + station: type: integer - update_date: - type: string - updater_id: - type: string - updater_username: - type: string - type: object - PlotConfigScatterLineTimeseriesTrace: - example: - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - properties: - color: - type: string - line_style: - type: string - name: - description: read-only - type: string - parameter: - description: read-only - type: string - plot_configuration_id: + status: type: string - show_markers: - type: boolean - timeseries_id: + status_id: type: string - trace_order: - type: integer - trace_type: + status_time: type: string - width: - type: number - y_axis: - description: "y1 or y2, default y1" + type: type: string - type: object - Profile: - example: - is_admin: true - roles: - - roles - - roles - tokens: - - token_id: token_id - issued: issued - - token_id: token_id - issued: issued - id: id - display_name: display_name - email: email - username: username - properties: - display_name: + type_id: type: string - email: + updated_by: type: string - id: + updated_by_username: type: string - is_admin: - type: boolean - roles: - items: - type: string - type: array - tokens: - items: - $ref: '#/components/schemas/TokenInfoProfile' - type: array - username: + updatedd_at: + type: string + usgs_id: type: string type: object - Project: - example: - image: image - updater_username: updater_username - federal_id: federal_id - creator_username: creator_username - instrument_count: 0 - update_date: update_date - office_id: office_id - instrument_group_count: 6 - 
creator_id: creator_id - name: name - updater_id: updater_id - district_id: district_id - id: id - create_date: create_date - slug: slug + dto.InstrumentGroup: properties: - create_date: + created_at: type: string - creator_id: + created_by: type: string - creator_username: + created_by_username: type: string - district_id: - type: string - federal_id: + description: type: string id: type: string - image: - type: string instrument_count: type: integer - instrument_group_count: - type: integer name: type: string - office_id: + project_id: type: string slug: type: string - update_date: + timeseries_count: + type: integer + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - ProjectCount: - example: - project_count: 0 + dto.InstrumentNote: properties: - project_count: - type: integer + body: + type: string + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + id: + type: string + instrument_id: + type: string + time: + type: string + title: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string type: object - ProjectInstrumentAssignments: - example: - instrument_ids: - - instrument_ids - - instrument_ids + dto.InstrumentNoteCollection: properties: - instrument_ids: + items: + items: + $ref: '#/components/schemas/dto.InstrumentNote' + type: array + type: object + dto.InstrumentProjectAssignments: + properties: + project_ids: items: type: string type: array type: object - ProjectMembership: - example: - role: role - role_id: role_id - profile_id: profile_id - id: id - email: email - username: username + dto.InstrumentStatus: properties: - email: + id: + type: string + status: + type: string + status_id: + type: string + time: type: string + type: object + dto.InstrumentStatusCollection: + properties: + items: + items: + $ref: '#/components/schemas/dto.InstrumentStatus' + type: array + 
type: object + dto.IpiSegment: + example: + temp_timeseries_id: temp_timeseries_id + length: 6.027456183070403 + tilt_timeseries_id: tilt_timeseries_id + id: 0 + inc_dev_timeseries_id: inc_dev_timeseries_id + instrument_id: instrument_id + length_timeseries_id: length_timeseries_id + properties: id: + type: integer + inc_dev_timeseries_id: type: string - profile_id: + instrument_id: type: string - role: + length: + type: number + length_timeseries_id: type: string - role_id: + temp_timeseries_id: type: string - username: + tilt_timeseries_id: type: string type: object - ReportConfig: + dto.Measurement: example: - updater_username: updater_username - creator_username: creator_username - global_overrides: - date_range: - value: value - enabled: true - show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true - description: description - project_name: project_name - update_date: update_date - district_name: district_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - slug: slug + annotation: annotation + validated: true + masked: true + time: time + error: error + value: 0.8008281904610115 properties: - create_date: + annotation: type: string - creator_id: + error: type: string - creator_username: + masked: + type: boolean + time: type: string - description: + validated: + type: boolean + value: + type: number + type: object + dto.MeasurementCollection: + properties: + items: + items: + $ref: '#/components/schemas/dto.Measurement' + type: array + timeseries_id: type: string - district_name: + type: object + dto.Opts: + additionalProperties: true + type: object + dto.PlotConfigBullseyePlot: + properties: + auto_range: + type: boolean + created_at: type: string - global_overrides: - $ref: '#/components/schemas/ReportConfigGlobalOverrides' + created_by: + type: string + 
created_by_username: + type: string + date_range: + type: string + display: + $ref: '#/components/schemas/dto.PlotConfigBullseyePlotDisplay' id: type: string name: type: string - plot_configs: - items: - $ref: '#/components/schemas/IDSlugName' - type: array - project_id: + plot_type: type: string - project_name: + project_id: type: string + report_configs: + items: + $ref: '#/components/schemas/dto.IDSlugName' + type: array + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean slug: type: string - update_date: + threshold: + type: integer + updated_by: type: string - updater_id: + updated_by_username: type: string - updater_username: + updatedd_at: type: string type: object - ReportConfigGlobalOverrides: - example: - date_range: - value: value - enabled: true - show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true + dto.PlotConfigBullseyePlotDisplay: properties: - date_range: - $ref: '#/components/schemas/TextOption' - show_masked: - $ref: '#/components/schemas/ToggleOption' - show_nonvalidated: - $ref: '#/components/schemas/ToggleOption' + x_axis_timeseries_id: + type: string + y_axis_timeseries_id: + type: string type: object - ReportConfigWithPlotConfigs: - example: - updater_username: updater_username - creator_username: creator_username - global_overrides: - date_range: - value: value - enabled: true - show_nonvalidated: - value: true - enabled: true - show_masked: - value: true - enabled: true - description: description - project_name: project_name - update_date: update_date - district_name: district_name - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_configs: - - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: 
plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug - - date_range: date_range - updater_username: updater_username - creator_username: creator_username - display: - layout: - custom_shapes: - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - - color: color - data_point: 0.8008281904610115 - name: name - plot_configuration_id: plot_configuration_id - enabled: true - y_axis_title: y_axis_title - y2_axis_title: y2_axis_title - traces: - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - - trace_type: trace_type - color: color - show_markers: true - timeseries_id: timeseries_id - y_axis: y_axis - parameter: 
parameter - name: name - width: 1.4658129805029452 - line_style: line_style - plot_configuration_id: plot_configuration_id - trace_order: 6 - show_comments: true - report_configs: - - name: name - id: id - slug: slug - - name: name - id: id - slug: slug - auto_range: true - show_masked: true - threshold: 5 - update_date: update_date - show_nonvalidated: true - project_id: project_id - creator_id: creator_id - name: name - updater_id: updater_id - id: id - create_date: create_date - plot_type: plot_type - slug: slug - slug: slug + dto.PlotConfigContourPlot: properties: - create_date: - type: string - creator_id: + auto_range: + type: boolean + created_at: type: string - creator_username: + created_by: type: string - description: + created_by_username: type: string - district_name: + date_range: type: string - global_overrides: - $ref: '#/components/schemas/ReportConfigGlobalOverrides' + display: + $ref: '#/components/schemas/dto.PlotConfigContourPlotDisplay' id: type: string name: type: string - plot_configs: + plot_type: + type: string + project_id: + type: string + report_configs: items: - $ref: '#/components/schemas/PlotConfigScatterLinePlot' + $ref: '#/components/schemas/dto.IDSlugName' type: array - project_id: + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean + slug: type: string - project_name: + threshold: + type: integer + updated_by: type: string - slug: + updated_by_username: type: string - update_date: + updatedd_at: type: string - updater_id: + type: object + dto.PlotConfigContourPlotDisplay: + properties: + contour_smoothing: + type: boolean + gradient_smoothing: + type: boolean + locf_backfill: type: string - updater_username: + show_labels: + type: boolean + time: type: string + timeseries_ids: + items: + type: string + type: array type: object - ReportDownloadJob: - example: - file_key: file_key - creator: creator - progress_update_date: progress_update_date - report_config_id: report_config_id - 
progress: 0 - file_expiry: file_expiry - id: id - create_date: create_date - status: status + dto.PlotConfigProfilePlot: properties: - create_date: + auto_range: + type: boolean + created_at: type: string - creator: + created_by: type: string - file_expiry: + created_by_username: type: string - file_key: + date_range: type: string + display: + $ref: '#/components/schemas/dto.PlotConfigProfilePlotDisplay' id: type: string - progress: + name: + type: string + plot_type: + type: string + project_id: + type: string + report_configs: + items: + $ref: '#/components/schemas/dto.IDSlugName' + type: array + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean + slug: + type: string + threshold: type: integer - progress_update_date: + updated_by: type: string - report_config_id: + updated_by_username: type: string - status: + updatedd_at: type: string type: object - SaaMeasurements: - example: - time: time - measurements: - - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 5.962133916683182 - - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 5.962133916683182 + dto.PlotConfigProfilePlotDisplay: properties: - measurements: + instrument_id: + type: string + instrument_type: + type: string + type: object + dto.PlotConfigScatterLineCustomShape: + properties: + 
color: + type: string + data_point: + type: number + enabled: + type: boolean + name: + type: string + plot_configuration_id: + type: string + type: object + dto.PlotConfigScatterLineDisplay: + properties: + layout: + $ref: '#/components/schemas/dto.PlotConfigScatterLineLayout' + traces: items: - $ref: '#/components/schemas/SaaSegmentMeasurement' + $ref: '#/components/schemas/dto.PlotConfigScatterLineTimeseriesTrace' type: array - time: + type: object + dto.PlotConfigScatterLineLayout: + properties: + custom_shapes: + items: + $ref: '#/components/schemas/dto.PlotConfigScatterLineCustomShape' + type: array + y2_axis_title: + type: string + y_axis_title: type: string type: object - SaaSegment: - example: - z_timeseries_id: z_timeseries_id - temp_timeseries_id: temp_timeseries_id - y_timeseries_id: y_timeseries_id - x_timeseries_id: x_timeseries_id - length: 6.027456183070403 - id: 0 - instrument_id: instrument_id - length_timeseries_id: length_timeseries_id + dto.PlotConfigScatterLinePlot: properties: + auto_range: + type: boolean + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + date_range: + type: string + display: + $ref: '#/components/schemas/dto.PlotConfigScatterLineDisplay' id: - type: integer - instrument_id: type: string - length: - type: number - length_timeseries_id: + name: type: string - temp_timeseries_id: + plot_type: type: string - x_timeseries_id: + project_id: type: string - y_timeseries_id: + report_configs: + items: + $ref: '#/components/schemas/dto.IDSlugName' + type: array + show_comments: + type: boolean + show_masked: + type: boolean + show_nonvalidated: + type: boolean + slug: type: string - z_timeseries_id: + threshold: + type: integer + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: type: string type: object - SaaSegmentMeasurement: - example: - elevation: 0.8008281904610115 - temp: 1.4658129805029452 - z_cum_dev: 1.2315135367772556 - y_increment: 
4.145608029883936 - x_cum_dev: 7.061401241503109 - temp_increment: 5.637376656633329 - z_increment: 1.0246457001441578 - y_cum_dev: 2.027123023002322 - x_increment: 9.301444243932576 - x: 2.3021358869347655 - "y": 3.616076749251911 - z: 7.386281948385884 - segment_id: 6 - temp_cum_dev: 5.962133916683182 + dto.PlotConfigScatterLineTimeseriesTrace: properties: - elevation: - type: number - segment_id: + color: + type: string + line_style: + type: string + name: + description: read-only + type: string + parameter: + description: read-only + type: string + plot_configuration_id: + type: string + show_markers: + type: boolean + timeseries_id: + type: string + trace_order: type: integer - temp: - type: number - temp_cum_dev: - type: number - temp_increment: - type: number - x: - type: number - x_cum_dev: - type: number - x_increment: - type: number - "y": - type: number - y_cum_dev: - type: number - y_increment: - type: number - z: - type: number - z_cum_dev: - type: number - z_increment: + trace_type: + type: string + width: type: number + y_axis: + description: "y1 or y2, default y1" + type: string type: object - SearchResult: + dto.Project: example: - item: "{}" + image: image + federal_id: federal_id + created_at: created_at + created_by: created_by + instrument_count: 0 + office_id: office_id + updated_by_username: updated_by_username + instrument_group_count: 6 + name: name + updated_by: updated_by + updatedd_at: updatedd_at + created_by_username: created_by_username + district_id: district_id id: id - type: type + slug: slug properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + district_id: + type: string + federal_id: + type: string id: type: string - item: - type: object - type: + image: + type: string + instrument_count: + type: integer + instrument_group_count: + type: integer + name: type: string - type: object - Site: - properties: - description: + office_id: type: string - elevation: + slug: + type: 
string + updated_by: type: string - elevationUnits: + updated_by_username: type: string - siteName: - $ref: '#/components/schemas/SiteName' + updatedd_at: + type: string + type: object + dto.ProjectInstrumentAssignments: + properties: + instrument_ids: + items: + type: string + type: array type: object - SiteName: + dto.ReportConfig: properties: + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + district_name: + type: string + global_overrides: + $ref: '#/components/schemas/dto.ReportConfigGlobalOverrides' id: type: string - nameType: + name: + type: string + plot_configs: + items: + $ref: '#/components/schemas/dto.IDSlugName' + type: array + project_id: + type: string + project_name: + type: string + slug: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: type: string type: object - Submittal: - example: - alert_type_id: alert_type_id - alert_config_id: alert_config_id - due_date: due_date - alert_config_name: alert_config_name - submittal_status_id: submittal_status_id - submittal_status_name: submittal_status_name - warning_sent: true - project_id: project_id - alert_type_name: alert_type_name - marked_as_missing: true - completion_date: completion_date - id: id - create_date: create_date + dto.ReportConfigGlobalOverrides: properties: - alert_config_id: + date_range: + $ref: '#/components/schemas/dto.TextOption' + show_masked: + $ref: '#/components/schemas/dto.ToggleOption' + show_nonvalidated: + $ref: '#/components/schemas/dto.ToggleOption' + type: object + dto.ReportDownloadJob: + properties: + created_at: type: string - alert_config_name: + created_by: type: string - alert_type_id: + file_expiry: type: string - alert_type_name: + file_key: type: string - completion_date: + id: + type: string + progress: + type: integer + progress_updated_at: type: string - create_date: + report_config_id: type: string - due_date: + status: type: string + 
type: object + dto.SaaSegment: + example: + z_timeseries_id: z_timeseries_id + temp_timeseries_id: temp_timeseries_id + y_timeseries_id: y_timeseries_id + x_timeseries_id: x_timeseries_id + length: 6.027456183070403 + id: 0 + instrument_id: instrument_id + length_timeseries_id: length_timeseries_id + properties: id: + type: integer + instrument_id: type: string - marked_as_missing: - type: boolean - project_id: + length: + type: number + length_timeseries_id: type: string - submittal_status_id: + temp_timeseries_id: type: string - submittal_status_name: + x_timeseries_id: + type: string + y_timeseries_id: + type: string + z_timeseries_id: type: string - warning_sent: - type: boolean type: object - TextOption: - example: - value: value - enabled: true + dto.TextOption: properties: enabled: type: boolean value: type: string type: object - Timeseries: + dto.Timeseries: example: values: - annotation: annotation @@ -10851,73 +11523,19 @@ components: type: string values: items: - $ref: '#/components/schemas/Measurement' + $ref: '#/components/schemas/dto.Measurement' type: array variable: type: string type: object - TimeseriesCollectionItems: - example: - items: - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter - name: name - variable: variable - id: id - instrument_slug: instrument_slug - is_computed: true - unit_id: unit_id - slug: slug - parameter_id: parameter_id - - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - 
unit: unit - parameter: parameter - name: name - variable: variable - id: id - instrument_slug: instrument_slug - is_computed: true - unit_id: unit_id - slug: slug - parameter_id: parameter_id + dto.TimeseriesCollectionItems: properties: items: items: - $ref: '#/components/schemas/Timeseries' + $ref: '#/components/schemas/dto.Timeseries' type: array type: object - TimeseriesCwms: + dto.TimeseriesCwms: example: cwms_office_id: cwms_office_id values: @@ -10942,241 +11560,413 @@ components: parameter: parameter cwms_extent_latest_time: cwms_extent_latest_time name: name - variable: variable + variable: variable + id: id + instrument_slug: instrument_slug + is_computed: true + unit_id: unit_id + slug: slug + parameter_id: parameter_id + properties: + cwms_extent_earliest_time: + type: string + cwms_extent_latest_time: + type: string + cwms_office_id: + type: string + cwms_timeseries_id: + type: string + id: + type: string + instrument: + type: string + instrument_id: + type: string + instrument_slug: + type: string + is_computed: + type: boolean + name: + type: string + parameter: + type: string + parameter_id: + type: string + slug: + type: string + type: + type: string + unit: + type: string + unit_id: + type: string + values: + items: + $ref: '#/components/schemas/dto.Measurement' + type: array + variable: + type: string + type: object + dto.TimeseriesMeasurementCollectionCollection: + properties: + items: + items: + $ref: '#/components/schemas/dto.MeasurementCollection' + type: array + type: object + dto.ToggleOption: + properties: + enabled: + type: boolean + value: + type: boolean + type: object + dto.UploaderConfig: + properties: + column_offset: + type: integer + comment_field: + type: string + comment_field_enabled: + type: boolean + created_at: + type: string + created_by: + type: string + created_by_username: + type: string + description: + type: string + id: + type: string + masked_field: + type: string + masked_field_enabled: + type: boolean + name: + 
type: string + project_id: + type: string + row_offset: + type: integer + slug: + type: string + time_field: + type: string + type: + $ref: '#/components/schemas/dto.UploaderConfigType' + tz_name: + type: string + updated_by: + type: string + updated_by_username: + type: string + updatedd_at: + type: string + validated_field: + type: string + validated_field_enabled: + type: boolean + type: object + dto.UploaderConfigMapping: + example: + timeseries_id: timeseries_id + field_name: field_name + properties: + field_name: + type: string + timeseries_id: + type: string + type: object + dto.UploaderConfigType: + enum: + - csv + - dux + - toa5 + type: string + x-enum-varnames: + - CSV + - DUX + - TOA5 + service.AggregatePlotConfigMeasurementsContourPlot: + example: + x: + - 0.8008281904610115 + - 0.8008281904610115 + "y": + - 6.027456183070403 + - 6.027456183070403 + z: + - 1.4658129805029452 + - 1.4658129805029452 + properties: + x: + items: + type: number + type: array + "y": + items: + type: number + type: array + z: + items: + type: number + type: array + type: object + service.AwarePlatformParameterConfig: + example: + aware_parameters: + key: aware_parameters + instrument_id: instrument_id + aware_id: aware_id + properties: + aware_id: + type: string + aware_parameters: + additionalProperties: + type: string + type: object + instrument_id: + type: string + type: object + service.DataloggerWithKey: + example: + created_at: created_at + model_id: model_id + created_by: created_by + updated_by_username: updated_by_username + tables: + - id: id + table_name: table_name + - id: id + table_name: table_name + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + created_by_username: created_by_username + model: model id: id - instrument_slug: instrument_slug - is_computed: true - unit_id: unit_id + sn: sn + errors: + - errors + - errors + key: key slug: slug - parameter_id: parameter_id properties: - cwms_extent_earliest_time: - type: 
string - cwms_extent_latest_time: + created_at: type: string - cwms_office_id: + created_by: type: string - cwms_timeseries_id: + created_by_username: type: string + errors: + items: + type: string + type: array id: type: string - instrument: + key: type: string - instrument_id: + model: type: string - instrument_slug: + model_id: type: string - is_computed: - type: boolean name: type: string - parameter: - type: string - parameter_id: + project_id: type: string slug: type: string - type: - type: string - unit: - type: string - unit_id: + sn: type: string - values: + tables: items: - $ref: '#/components/schemas/Measurement' + $ref: '#/components/schemas/db.DataloggerTableIDName' type: array - variable: + updated_at: + type: string + updated_by: + type: string + updated_by_username: type: string type: object - TimeseriesMeasurementCollectionCollection: - example: - items: - - timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - timeseries_id: timeseries_id - items: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - properties: + service.DomainMap: + additionalProperties: items: - items: - $ref: '#/components/schemas/MeasurementCollection' - type: array + $ref: '#/components/schemas/db.DomainGroupOpt' + type: array type: object - ToggleOption: + service.Healthcheck: example: - value: true - enabled: true + status: status properties: - enabled: - type: boolean - value: - type: boolean + status: + type: string type: object - Token: + service.Heartbeat: example: - token_id: token_id - profile_id: profile_id - issued: issued - secret_token: secret_token + time: time 
properties: - issued: - type: string - profile_id: - type: string - secret_token: - type: string - token_id: + time: type: string type: object - TokenInfoProfile: + service.InstrumentsValidation: example: - token_id: token_id - issued: issued + is_valid: true + errors: + - errors + - errors properties: - issued: - type: string - token_id: - type: string + errors: + items: + type: string + type: array + is_valid: + type: boolean type: object - Unit: + service.ProjectCount: example: - measure: measure - unit_family_id: unit_family_id - name: name - unit_family: unit_family - id: id - abbreviation: abbreviation - measure_id: measure_id + project_count: 0 properties: - abbreviation: - type: string - id: - type: string - measure: - type: string - measure_id: - type: string - name: - type: string - unit_family: - type: string - unit_family_id: - type: string + project_count: + type: integer type: object - collectionGroupDetailsTimeseries: + service.ReportConfigWithPlotConfigs: example: - values: - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - - annotation: annotation - validated: true - masked: true - time: time - error: error - value: 0.8008281904610115 - instrument: instrument - type: type - instrument_id: instrument_id - unit: unit - parameter: parameter + global_overrides: + date_range: + value: value + enabled: true + show_nonvalidated: + value: true + enabled: true + show_masked: + value: true + enabled: true + created_at: created_at + description: description + project_name: project_name + created_by: created_by + updated_by_username: updated_by_username + district_name: district_name + updated_at: updated_at + project_id: project_id name: name - variable: variable - latest_value: 0.8008281904610115 + updated_by: updated_by + created_by_username: created_by_username id: id - instrument_slug: instrument_slug - is_computed: true - latest_time: latest_time - unit_id: unit_id + plot_configs: + - 
date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug + - date_range: date_range + display: "{}" + show_comments: true + report_configs: + - name: name + id: id + slug: slug + - name: name + id: id + slug: slug + created_at: created_at + auto_range: true + show_masked: true + threshold: 0 + created_by: created_by + show_nonvalidated: true + updated_at: updated_at + project_id: project_id + name: name + updated_by: updated_by + id: id + plot_type: scatter-line + slug: slug slug: slug - parameter_id: parameter_id properties: - id: + created_at: type: string - instrument: + created_by: type: string - instrument_id: + created_by_username: type: string - instrument_slug: + description: type: string - is_computed: - type: boolean - latest_time: + district_name: + type: string + global_overrides: + $ref: '#/components/schemas/db.ReportConfigGlobalOverrides' + id: type: string - latest_value: - type: number name: type: string - parameter: + plot_configs: + items: + $ref: '#/components/schemas/db.VPlotConfiguration' + type: array + project_id: type: string - parameter_id: + project_name: type: string slug: type: string - type: - type: string - unit: + updated_at: type: string - unit_id: + updated_by: type: string - values: - items: - $ref: '#/components/schemas/Measurement' - type: array - variable: + updated_by_username: type: string type: object - pgtype.JSON: + service.Token: example: - bytes: - - 0 - - 0 - status: 6 + token_id: token_id + profile_id: profile_id + id: id + issued: issued + hash: hash + secret_token: secret_token properties: - bytes: - items: - type: integer - type: array - status: - $ref: 
'#/components/schemas/pgtype.Status' + hash: + type: string + id: + type: string + issued: + type: string + profile_id: + type: string + secret_token: + type: string + token_id: + type: string + type: object + _timeseries_measurements_post_request: + properties: + timeseries_measurement_collections: + description: TOA5 file of timeseries measurement collections + format: binary + type: string type: object - pgtype.Status: - enum: - - 0 - - 1 - - 2 - type: integer - x-enum-varnames: - - Undefined - - "Null" - - Present securitySchemes: Bearer: description: Type "Bearer" followed by a space and access token. diff --git a/api/internal/service/alert.go b/api/internal/service/alert.go index 04d72cb5..35fa68aa 100644 --- a/api/internal/service/alert.go +++ b/api/internal/service/alert.go @@ -3,88 +3,60 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) -type AlertService interface { - CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error - GetAllAlertsForProject(ctx context.Context, projectID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.Alert, error) - GetAllAlertsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Alert, error) - GetOneAlertForProfile(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertRead(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) - DoAlertUnread(ctx context.Context, profileID uuid.UUID, alertID uuid.UUID) (model.Alert, error) +func (s DBService) AlertCreateBatch(ctx context.Context, alertConfigIDs []uuid.UUID) error { + var err error + s.Queries.AlertCreateBatch(ctx, alertConfigIDs).Exec(batchExecErr(&err)) + return err } -type alertService struct { - db *model.Database - *model.Queries -} - -func NewAlertService(db *model.Database, q *model.Queries) 
*alertService { - return &alertService{db, q} -} - -// Create creates one or more new alerts -func (s alertService) CreateAlerts(ctx context.Context, alertConfigIDs []uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - for _, id := range alertConfigIDs { - if err := qtx.CreateAlerts(ctx, id); err != nil { - return err - } - } - return tx.Commit() -} - -// DoAlertRead marks an alert as read for a profile -func (s alertService) DoAlertRead(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertReadCreate(ctx context.Context, arg db.AlertReadCreateParams) (db.AlertGetRow, error) { + var a db.AlertGetRow + tx, err := s.db.Begin(ctx) if err != nil { - return model.Alert{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DoAlertRead(ctx, profileID, alertID); err != nil { - return model.Alert{}, err + if err := qtx.AlertReadCreate(ctx, arg); err != nil { + return a, err } - b, err := qtx.GetOneAlertForProfile(ctx, profileID, alertID) + a, err = qtx.AlertGet(ctx, db.AlertGetParams{ + ProfileID: arg.ProfileID, + ID: arg.AlertID, + }) if err != nil { - return model.Alert{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.Alert{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - - return b, nil + return a, nil } -// DoAlertUnread marks an alert as unread for a profile -func (s alertService) DoAlertUnread(ctx context.Context, profileID, alertID uuid.UUID) (model.Alert, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertReadDelete(ctx context.Context, arg db.AlertReadDeleteParams) (db.AlertGetRow, error) { + var a db.AlertGetRow + tx, err := s.db.Begin(ctx) if err != nil { - return model.Alert{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer 
txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DoAlertUnread(ctx, profileID, alertID); err != nil { - return model.Alert{}, err + if err := qtx.AlertReadDelete(ctx, arg); err != nil { + return a, err } - a, err := qtx.GetOneAlertForProfile(ctx, profileID, alertID) + a, err = qtx.AlertGet(ctx, db.AlertGetParams{ + ProfileID: arg.ProfileID, + ID: arg.AlertID, + }) if err != nil { - return model.Alert{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.Alert{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return a, nil } diff --git a/api/internal/service/alert_check.go b/api/internal/service/alert_check.go index a30c8488..80ff7b9e 100644 --- a/api/internal/service/alert_check.go +++ b/api/internal/service/alert_check.go @@ -9,11 +9,14 @@ import ( "time" "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/email" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) +// TODO: refactor this to work with a task scheduler like airflow, or possibly gocron + var ( GreenSubmittalStatusID uuid.UUID = uuid.MustParse("0c0d6487-3f71-4121-8575-19514c7b9f03") YellowSubmittalStatusID uuid.UUID = uuid.MustParse("ef9a3235-f6e2-4e6c-92f6-760684308f7f") @@ -29,54 +32,38 @@ const ( reminder = "Reminder" ) -type AlertCheckService interface { - DoAlertChecks(ctx context.Context) error -} +type alertConfigMap map[uuid.UUID]db.VAlertConfig + +type submittalMap map[uuid.UUID]db.VSubmittal type alertConfigChecker[T alertChecker] interface { - GetAlertConfig() model.AlertConfig - SetAlertConfig(model.AlertConfig) + GetAlertConfig() db.VAlertConfig + SetAlertConfig(ac db.VAlertConfig) GetChecks() []T - SetChecks([]T) - DoEmail(string, config.AlertCheckConfig) error + SetChecks(checks []T) + DoEmail(content string, cfg 
*config.AlertCheckConfig) error } type alertChecker interface { GetShouldWarn() bool GetShouldAlert() bool GetShouldRemind() bool - GetSubmittal() model.Submittal - SetSubmittal(model.Submittal) -} - -type alertCheckService struct { - db *model.Database - *model.Queries - cfg *config.AlertCheckConfig + GetSubmittal() *db.VSubmittal + SetSubmittal(sub db.VSubmittal) } -func NewAlertCheckService(db *model.Database, q *model.Queries, cfg *config.AlertCheckConfig) *alertCheckService { - return &alertCheckService{db, q, cfg} -} - -func (s alertCheckService) DoAlertChecks(ctx context.Context) error { - if s.cfg == nil { - return fmt.Errorf("missing config") - } - - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DoAlertChecks(ctx context.Context, cfg *config.AlertCheckConfig) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - subs, err := qtx.ListUnverifiedMissingSubmittals(ctx) + subs, err := qtx.SubmittalListUnverifiedMissing(ctx) if err != nil { return err } - acs, err := qtx.ListAndCheckAlertConfigs(ctx) + acs, err := qtx.AlertConfigListUpdateLastCheckedAt(ctx) if err != nil { return err } @@ -85,25 +72,25 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } - subMap := make(map[uuid.UUID]model.Submittal) + subMap := make(map[uuid.UUID]db.VSubmittal) for _, s := range subs { subMap[s.ID] = s } - acMap := make(map[uuid.UUID]model.AlertConfig) + acMap := make(map[uuid.UUID]db.VAlertConfig) for _, a := range acs { acMap[a.ID] = a } errs := make([]error, 0) - if err := checkMeasurements(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkMeasurements(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := checkEvaluations(ctx, qtx, subMap, acMap, *s.cfg); err != nil { + if err := checkEvaluations(ctx, qtx, subMap, acMap, cfg); err != nil { errs = append(errs, err) } - if err := tx.Commit(); 
err != nil { + if err := tx.Commit(ctx); err != nil { errs = append(errs, err) } @@ -114,29 +101,38 @@ func (s alertCheckService) DoAlertChecks(ctx context.Context) error { return nil } -func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigEvaluationCheck, 0) - ecs, err := q.GetAllIncompleteEvaluationSubmittals(ctx) +func checkEvaluations(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg *config.AlertCheckConfig) error { + accs := make([]*AlertConfigEvaluationCheck, 0) + ecs, err := q.SubmittalListIncompleteEvaluation(ctx) if err != nil { return err } - ecMap := make(map[uuid.UUID][]*model.EvaluationCheck) + ecMap := make(map[uuid.UUID][]*EvaluationCheck) for k := range acMap { - ecMap[k] = make([]*model.EvaluationCheck, 0) + ecMap[k] = make([]*EvaluationCheck, 0) } for idx := range ecs { + ck := ecs[idx] + check := EvaluationCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} if sub, ok := subMap[ecs[idx].SubmittalID]; ok { - ecs[idx].Submittal = sub - ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], ecs[idx]) + ecs[idx].Submittal = &sub + ecMap[ecs[idx].AlertConfigID] = append(ecMap[ecs[idx].AlertConfigID], &check) } } for k, v := range acMap { if v.AlertTypeID != EvaluationSubmittalAlertTypeID { continue } - acc := model.AlertConfigEvaluationCheck{ - AlertConfig: v, + acc := AlertConfigEvaluationCheck{ + AlertConfig: AlertConfig(v), AlertChecks: ecMap[k], } accs = append(accs, &acc) @@ -151,22 +147,31 @@ func checkEvaluations(ctx context.Context, q *model.Queries, subMap model.Submit return nil } -func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.SubmittalMap, acMap model.AlertConfigMap, cfg 
config.AlertCheckConfig) error { - accs := make([]*model.AlertConfigMeasurementCheck, 0) - mcs, err := q.GetAllIncompleteMeasurementSubmittals(ctx) +func checkMeasurements(ctx context.Context, q *db.Queries, subMap submittalMap, acMap alertConfigMap, cfg *config.AlertCheckConfig) error { + accs := make([]*AlertConfigMeasurementCheck, 0) + mcs, err := q.SubmittalListIncompleteMeasurement(ctx) if err != nil { return err } - mcMap := make(map[uuid.UUID][]*model.MeasurementCheck) + mcMap := make(map[uuid.UUID][]*MeasurementCheck) for k := range acMap { - mcMap[k] = make([]*model.MeasurementCheck, 0) + mcMap[k] = make([]*MeasurementCheck, 0) } for idx := range mcs { if sub, ok := subMap[mcs[idx].SubmittalID]; ok { - mcs[idx].Submittal = sub - mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], mcs[idx]) + ck := mcs[idx] + check := MeasurementCheck{ + AlertCheck: AlertCheck{ + AlertConfigID: ck.AlertConfigID, + SubmittalID: ck.SubmittalID, + ShouldWarn: ck.ShouldWarn, + ShouldAlert: ck.ShouldAlert, + ShouldRemind: ck.ShouldRemind, + }} + mcs[idx].Submittal = &sub + mcMap[mcs[idx].AlertConfigID] = append(mcMap[mcs[idx].AlertConfigID], &check) } } @@ -174,8 +179,8 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi if v.AlertTypeID != MeasurementSubmittalAlertTypeID { continue } - acc := model.AlertConfigMeasurementCheck{ - AlertConfig: v, + acc := AlertConfigMeasurementCheck{ + AlertConfig: AlertConfig(v), AlertChecks: mcMap[k], } accs = append(accs, &acc) @@ -189,21 +194,35 @@ func checkMeasurements(ctx context.Context, q *model.Queries, subMap model.Submi return nil } -func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT) error { +func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT) error { for _, acc := range accs { ac := acc.GetAlertConfig() - if err := q.UpdateAlertConfigLastReminded(ctx, 
ac); err != nil { + if err := q.AlertConfigUpdateLastRemindedAt(ctx, db.AlertConfigUpdateLastRemindedAtParams{ + ID: ac.ID, + LastRemindedAt: ac.LastRemindedAt, + }); err != nil { return err } checks := acc.GetChecks() for _, c := range checks { sub := c.GetSubmittal() - if err := q.UpdateSubmittalCompletionDateOrWarningSent(ctx, sub); err != nil { + if sub == nil { + continue + } + if err := q.SubmittalUpdateCompletionDateOrWarningSent(ctx, db.SubmittalUpdateCompletionDateOrWarningSentParams{ + ID: sub.ID, + SubmittalStatusID: &sub.SubmittalStatusID, + CompletedAt: sub.CompletedAt, + WarningSent: sub.WarningSent, + }); err != nil { return err } } if ac.CreateNextSubmittalFrom != nil { - if err := q.CreateNextSubmittalFromNewAlertConfigDate(ctx, ac); err != nil { + if err := q.SubmittalCreateNextFromNewAlertConfigDate(ctx, db.SubmittalCreateNextFromNewAlertConfigDateParams{ + ID: ac.ID, + Date: *ac.CreateNextSubmittalFrom, + }); err != nil { return err } } @@ -224,7 +243,7 @@ func updateAlertConfigChecks[T alertChecker, PT alertConfigChecker[T]](ctx conte // TODO: smtp.SendMail esablishes a new connection for each batch of emails sent. I would be better to aggregate // the contents of each email, then create a connection pool to reuse and send all emails at once, with any errors wrapped and returned // p.s. 
Dear future me/someone else: I'm sorry -func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *model.Queries, accs []PT, cfg config.AlertCheckConfig) error { +func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, q *db.Queries, accs []PT, cfg *config.AlertCheckConfig) error { defer util.Timer()() mu := &sync.Mutex{} @@ -264,22 +283,22 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, // completion_date to current timestamp if sub.SubmittalStatusID == RedSubmittalStatusID { sub.SubmittalStatusID = YellowSubmittalStatusID - sub.CompletionDate = &t + sub.CompletedAt = &t ac.CreateNextSubmittalFrom = &t } else // if submittal status is green and the current time is not before the submittal due date, // complete the submittal at that due date and prepare the next submittal interval - if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueDate) { - sub.CompletionDate = &sub.DueDate - ac.CreateNextSubmittalFrom = &sub.DueDate + if sub.SubmittalStatusID == GreenSubmittalStatusID && !t.Before(sub.DueAt) { + sub.CompletedAt = &sub.DueAt + ac.CreateNextSubmittalFrom = &sub.DueAt } } else // if any submittal warning is triggered, immediately send a // warning email, since submittal due dates are unique within alert configs if shouldWarn && !sub.WarningSent { - if !ac.MuteConsecutiveAlerts || ac.LastReminded == nil { + if !ac.MuteConsecutiveAlerts || ac.LastRemindedAt == nil { mu.Lock() if err := acc.DoEmail(warning, cfg); err != nil { errs = append(errs, err) @@ -296,7 +315,7 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, if sub.SubmittalStatusID != RedSubmittalStatusID { sub.SubmittalStatusID = RedSubmittalStatusID acAlert = true - ac.CreateNextSubmittalFrom = &sub.DueDate + ac.CreateNextSubmittalFrom = &sub.DueAt } resetReminders = false } @@ -307,23 +326,27 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx 
context.Context, acReminder = true } - c.SetSubmittal(sub) + if sub == nil { + continue + } + + c.SetSubmittal(*sub) checks[j] = c } // if there are no alerts, there should also be no reminders sent. "last_reminded" is used to determine // if an alert has already been sent for an alert config, and send a reminder if so if resetReminders { - ac.LastReminded = nil + ac.LastRemindedAt = nil } // if there are any reminders within an alert config, they will override the alerts if MuteConsecutiveAlerts is true - if acAlert && ((!acReminder && ac.LastReminded == nil) || !ac.MuteConsecutiveAlerts) { - ac.LastReminded = &t + if acAlert && ((!acReminder && ac.LastRemindedAt == nil) || !ac.MuteConsecutiveAlerts) { + ac.LastRemindedAt = &t sendAlertEmail = true } - if acReminder && ac.LastReminded != nil { - ac.LastReminded = &t + if acReminder && ac.LastRemindedAt != nil { + ac.LastRemindedAt = &t sendReminderEmail = true } @@ -360,3 +383,157 @@ func handleChecks[T alertChecker, PT alertConfigChecker[T]](ctx context.Context, return nil } + +type AlertCheck struct { + AlertConfigID uuid.UUID + SubmittalID uuid.UUID + ShouldWarn bool + ShouldAlert bool + ShouldRemind bool + Submittal *db.VSubmittal +} + +func (ck AlertCheck) GetShouldWarn() bool { + return ck.ShouldWarn +} + +func (ck AlertCheck) GetShouldAlert() bool { + return ck.ShouldAlert +} + +func (ck AlertCheck) GetShouldRemind() bool { + return ck.ShouldRemind +} + +func (ck AlertCheck) GetSubmittal() *db.VSubmittal { + return ck.Submittal +} + +func (ck *AlertCheck) SetSubmittal(sub db.VSubmittal) { + ck.Submittal = &sub +} + +type AlertConfig db.VAlertConfig + +func (a *AlertConfig) GetToAddresses() []string { + emails := make([]string, len(a.AlertEmailSubscriptions)) + for idx := range a.AlertEmailSubscriptions { + emails[idx] = a.AlertEmailSubscriptions[idx].Email + } + return emails +} + +type AlertConfigEvaluationCheck struct { + AlertConfig + AlertChecks []*EvaluationCheck +} + +type EvaluationCheck struct { + 
AlertCheck +} + +func (a AlertConfigEvaluationCheck) GetAlertConfig() db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigEvaluationCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigEvaluationCheck) GetChecks() []*EvaluationCheck { + return a.AlertChecks +} + +func (a *AlertConfigEvaluationCheck) SetChecks(ec []*EvaluationCheck) { + a.AlertChecks = ec +} + +func (acc AlertConfigEvaluationCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Evaluation Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Evaluation Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Evaluation Submittals:\r\n" + + "{{range .AlertChecks}}{{if or .ShouldAlert .ShouldWarn}}" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}" + + "{{if .ShouldAlert}} (missing) {{else if .ShouldWarn}} (warning) {{end}}\r\n{{end}}{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, acc) + if err != nil { + return err + } + content.To = acc.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} + +type AlertConfigMeasurementCheck struct { + AlertConfig AlertConfig + AlertChecks []*MeasurementCheck +} + +type MeasurementCheck struct { + AlertCheck + AffectedTimeseries []db.AlertCheckMeasurementSubmittalAffectedTimeseries +} + +func (a AlertConfigMeasurementCheck) GetAlertConfig() 
db.VAlertConfig { + return db.VAlertConfig(a.AlertConfig) +} + +func (a *AlertConfigMeasurementCheck) SetAlertConfig(ac db.VAlertConfig) { + a.AlertConfig = AlertConfig(ac) +} + +func (a AlertConfigMeasurementCheck) GetChecks() []*MeasurementCheck { + return a.AlertChecks +} + +func (a *AlertConfigMeasurementCheck) SetChecks(mc []*MeasurementCheck) { + a.AlertChecks = mc +} + +func (ms AlertConfigMeasurementCheck) DoEmail(emailType string, cfg *config.AlertCheckConfig) error { + if emailType == "" { + return fmt.Errorf("must provide emailType") + } + preformatted := email.EmailContent{ + TextSubject: "-- DO NOT REPLY -- MIDAS " + emailType + ": Timeseries Measurement Submittal", + TextBody: "The following " + emailType + " has been triggered:\r\n\r\n" + + "Project: {{.AlertConfig.ProjectName}}\r\n" + + "Alert Type: Measurement Submittal\r\n" + + "Alert Name: \"{{.AlertConfig.Name}}\"\r\n" + + "Description: \"{{.AlertConfig.Body}}\"\r\n" + + "Expected Measurement Submittals:\r\n" + + "{{range .AlertChecks}}" + + "\t• {{.Submittal.CreatedAt.Format \"Jan 02 2006 15:04:05 UTC\"}} - {{.Submittal.DueDate.Format \"Jan 02 2006 15:04:05 UTC\"}}\r\n" + + "{{range .AffectedTimeseries}}" + + "\t\t• {{.InstrumentName}}: {{.TimeseriesName}} ({{.Status}})\r\n" + + "{{end}}\r\n{{end}}", + } + templContent, err := email.CreateEmailTemplateContent(preformatted) + if err != nil { + return err + } + content, err := email.FormatAlertConfigTemplates(templContent, ms) + if err != nil { + return err + } + content.To = ms.AlertConfig.GetToAddresses() + if err := email.ConstructAndSendEmail(content, cfg); err != nil { + return err + } + return nil +} diff --git a/api/internal/service/alert_config.go b/api/internal/service/alert_config.go index f0799794..68d486bc 100644 --- a/api/internal/service/alert_config.go +++ b/api/internal/service/alert_config.go @@ -3,37 +3,18 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + 
"github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type AlertConfigService interface { - GetAllAlertConfigsForProject(ctx context.Context, projectID uuid.UUID) ([]model.AlertConfig, error) - GetAllAlertConfigsForProjectAndAlertType(ctx context.Context, projectID, alertTypeID uuid.UUID) ([]model.AlertConfig, error) - GetAllAlertConfigsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.AlertConfig, error) - GetOneAlertConfig(ctx context.Context, alertConfigID uuid.UUID) (model.AlertConfig, error) - CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) - UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) - DeleteAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error -} - -type alertConfigService struct { - db *model.Database - *model.Queries -} - -func NewAlertConfigService(db *model.Database, q *model.Queries) *alertConfigService { - return &alertConfigService{db, q} -} - -// CreateAlertConfig creates one new alert configuration -func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.AlertConfig) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertConfigCreate(ctx context.Context, ac dto.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -44,13 +25,28 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler qtx := s.WithTx(tx) - acID, err := qtx.CreateAlertConfig(ctx, ac) + acID, err := qtx.AlertConfigCreate(ctx, db.AlertConfigCreateParams{ + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + AlertTypeID: ac.AlertTypeID, + StartedAt: ac.StartedAt, 
+ ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + CreatedBy: ac.CreatedBy, + CreatedAt: ac.CreatedAt, + }) if err != nil { return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, acID, aci.InstrumentID); err != nil { + if err := qtx.AlertConfigInstrumentCreateAssignment(ctx, db.AlertConfigInstrumentCreateAssignmentParams{ + AlertConfigID: acID, + InstrumentID: aci.InstrumentID, + }); err != nil { return a, err } } @@ -59,29 +55,29 @@ func (s alertConfigService) CreateAlertConfig(ctx context.Context, ac model.Aler return a, err } - if err := qtx.CreateNextSubmittalFromExistingAlertConfigDate(ctx, acID); err != nil { + if err := qtx.SubmittalCreateNextFromExistingAlertConfigDate(ctx, acID); err != nil { return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, acID) + acNew, err := qtx.AlertConfigGet(ctx, acID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return acNew, nil } -// UpdateAlertConfig updates an alert config -func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID uuid.UUID, ac model.AlertConfig) (model.AlertConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertConfigUpdate(ctx context.Context, alertConfigID uuid.UUID, ac dto.AlertConfig) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { - return model.AlertConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) if ac.RemindInterval == "" { ac.RemindInterval = "PT0" @@ -92,39 +88,54 @@ func (s alertConfigService) UpdateAlertConfig(ctx context.Context, alertConfigID qtx := s.WithTx(tx) - if err := qtx.UpdateAlertConfig(ctx, ac); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigUpdate(ctx, 
db.AlertConfigUpdateParams{ + ID: ac.ID, + ProjectID: ac.ProjectID, + Name: ac.Name, + Body: ac.Body, + StartedAt: ac.StartedAt, + ScheduleInterval: ac.ScheduleInterval, + MuteConsecutiveAlerts: ac.MuteConsecutiveAlerts, + RemindInterval: ac.RemindInterval, + WarningInterval: ac.WarningInterval, + UpdatedBy: ac.UpdatedBy, + UpdatedAt: ac.UpdatedAt, + }); err != nil { + return a, err } - if err := qtx.UnassignAllInstrumentsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigInstrumentDeleteAssignmentsForAlertConfig(ctx, alertConfigID); err != nil { + return a, err } for _, aci := range ac.Instruments { - if err := qtx.AssignInstrumentToAlertConfig(ctx, alertConfigID, aci.InstrumentID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertConfigInstrumentCreateAssignment(ctx, db.AlertConfigInstrumentCreateAssignmentParams{ + AlertConfigID: alertConfigID, + InstrumentID: aci.InstrumentID, + }); err != nil { + return a, err } } - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if err := qtx.AlertEmailSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { + return a, err } if err := registerAndSubscribe(ctx, qtx, alertConfigID, ac.AlertEmailSubscriptions); err != nil { - return model.AlertConfig{}, err + return a, err } - if err := qtx.UpdateFutureSubmittalForAlertConfig(ctx, alertConfigID); err != nil { - return model.AlertConfig{}, err + if _, err := qtx.SubmittalUpdateNextForAlertConfig(ctx, &alertConfigID); err != nil { + return a, err } - acNew, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + a, err = qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { - return model.AlertConfig{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.AlertConfig{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return acNew, nil + return a, nil } diff --git 
a/api/internal/service/alert_subscription.go b/api/internal/service/alert_subscription.go index 75b60079..18a7a8b8 100644 --- a/api/internal/service/alert_subscription.go +++ b/api/internal/service/alert_subscription.go @@ -4,223 +4,209 @@ import ( "context" "fmt" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) const ( - unknown = "" - email = "email" - profile = "profile" + unknownUserType = "" + emailUserType = "email" + profileUserType = "profile" ) -type AlertSubscriptionService interface { - SubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - UnsubscribeProfileToAlerts(ctx context.Context, alertConfigID, profileID uuid.UUID) error - GetAlertSubscription(ctx context.Context, alertConfigID, profileID uuid.UUID) (model.AlertSubscription, error) - GetAlertSubscriptionByID(ctx context.Context, subscriptionID uuid.UUID) (model.AlertSubscription, error) - ListMyAlertSubscriptions(ctx context.Context, profileID uuid.UUID) ([]model.AlertSubscription, error) - UpdateMyAlertSubscription(ctx context.Context, s model.AlertSubscription) (model.AlertSubscription, error) - SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) - UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error - UnregisterEmail(ctx context.Context, emailID uuid.UUID) error -} - -type alertSubscriptionService struct { - db *model.Database - *model.Queries -} - -func NewAlertSubscriptionService(db *model.Database, q *model.Queries) *alertSubscriptionService { - return &alertSubscriptionService{db, q} -} - -// SubscribeProfileToAlerts 
subscribes a profile to an instrument alert -func (s alertSubscriptionService) SubscribeProfileToAlerts(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertProfileSubscriptionCreateForAlertConfigProfile(ctx context.Context, alertConfigID uuid.UUID, profileID uuid.UUID) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.SubscribeProfileToAlerts(ctx, alertConfigID, profileID); err != nil { + if err := qtx.AlertProfileSubscriptionCreateOnAnyConflictDoNothing(ctx, db.AlertProfileSubscriptionCreateOnAnyConflictDoNothingParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }); err != nil { return a, err } - updated, err := qtx.GetAlertSubscription(ctx, alertConfigID, profileID) + updated, err := qtx.AlertSubscriptionGetForAlertConfigProfile(ctx, db.AlertSubscriptionGetForAlertConfigProfileParams{ + AlertConfigID: alertConfigID, + ProfileID: profileID, + }) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return updated, nil } -// UpdateMyAlertSubscription updates properties on a AlertSubscription -func (s alertSubscriptionService) UpdateMyAlertSubscription(ctx context.Context, sub model.AlertSubscription) (model.AlertSubscription, error) { - var a model.AlertSubscription - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertProfileSubscriptionUpdateForProfile(ctx context.Context, sub dto.AlertSubscription) (db.AlertProfileSubscription, error) { + var a db.AlertProfileSubscription + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := 
qtx.UpdateMyAlertSubscription(ctx, sub); err != nil { + if err := qtx.AlertSubscriptionUpdateForProfile(ctx, db.AlertSubscriptionUpdateForProfileParams{ + MuteUi: sub.MuteUI, + MuteNotify: sub.MuteNotify, + AlertConfigID: sub.AlertConfigID, + ProfileID: sub.ProfileID, + }); err != nil { return a, err } - - updated, err := qtx.GetAlertSubscription(ctx, sub.AlertConfigID, sub.ProfileID) + updated, err := qtx.AlertSubscriptionGet(ctx, sub.ID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return updated, nil } -func (s alertSubscriptionService) SubscribeEmailsToAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertEmailSubscriptionCreateForAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := registerAndSubscribe(ctx, qtx, alertConfigID, emails); err != nil { return a, err } - // Register any emails that are not yet in system for idx, em := range emails { - if em.UserType == unknown || em.UserType == email { - newID, err := qtx.RegisterEmail(ctx, em.Email) + if em.UserType == unknownUserType || em.UserType == emailUserType { + newID, err := qtx.EmailGetOrCreate(ctx, em.Email) if err != nil { return a, err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } // Subscribe emails for _, em := range emails { - if em.UserType == email { - if err := qtx.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := qtx.AlertEmailSubscriptionCreate(ctx, 
db.AlertEmailSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err } - } else if em.UserType == profile { - if err := qtx.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.AlertProfileSubscriptionCreate(ctx, db.AlertProfileSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } -func (s alertSubscriptionService) UnsubscribeEmailsFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) (model.AlertConfig, error) { - var a model.AlertConfig - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertEmailSubscriptionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) (db.VAlertConfig, error) { + var a db.VAlertConfig + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, em := range emails { - if em.UserType == unknown { + if em.UserType == unknownUserType { return a, fmt.Errorf("required field user_type is null, aborting transaction") - } else if em.UserType == email { - if err := qtx.UnsubscribeEmailFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == emailUserType { + if err := qtx.AlertEmailSubscriptionDelete(ctx, db.AlertEmailSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return a, err 
} - } else if em.UserType == profile { - if err := qtx.UnsubscribeProfileFromAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := qtx.AlertProfileSubscriptionDelete(ctx, db.AlertProfileSubscriptionDeleteParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return a, err } } else { return a, fmt.Errorf("unable to unsubscribe email %s: user type %s does not exist, aborting transaction", em.Email, em.UserType) } } - - acUpdated, err := qtx.GetOneAlertConfig(ctx, alertConfigID) + acUpdated, err := qtx.AlertConfigGet(ctx, alertConfigID) if err != nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return acUpdated, nil } -func (s alertSubscriptionService) UnsubscribeAllFromAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) AlertSubscriptionDeleteForAlertConfig(ctx context.Context, alertConfigID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - if err := qtx.UnsubscribeAllEmailsFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.AlertEmailSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := qtx.UnsubscribeAllProfilesFromAlertConfig(ctx, alertConfigID); err != nil { + if err := qtx.AlertProfileSubscritpionDeleteForAlertConfig(ctx, alertConfigID); err != nil { return err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return err } return nil } -func registerAndSubscribe(ctx context.Context, q *model.Queries, alertConfigID uuid.UUID, emails []model.EmailAutocompleteResult) error { +func registerAndSubscribe(ctx context.Context, q *db.Queries, alertConfigID uuid.UUID, emails []dto.EmailAutocompleteResult) error { for idx, em := range emails { - if 
em.UserType == unknown || em.UserType == email { - newID, err := q.RegisterEmail(ctx, em.Email) + if em.UserType == unknownUserType || em.UserType == emailUserType { + newID, err := q.EmailGetOrCreate(ctx, em.Email) if err != nil { return err } emails[idx].ID = newID - emails[idx].UserType = email + emails[idx].UserType = emailUserType } } for _, em := range emails { - if em.UserType == email { - if err := q.SubscribeEmailToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + if em.UserType == emailUserType { + if err := q.AlertEmailSubscriptionCreate(ctx, db.AlertEmailSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + EmailID: em.ID, + }); err != nil { return err } - } else if em.UserType == profile { - if err := q.SubscribeProfileToAlertConfig(ctx, alertConfigID, em.ID); err != nil { + } else if em.UserType == profileUserType { + if err := q.AlertProfileSubscriptionCreate(ctx, db.AlertProfileSubscriptionCreateParams{ + AlertConfigID: alertConfigID, + ProfileID: em.ID, + }); err != nil { return err } } else { diff --git a/api/internal/service/autocomplete.go b/api/internal/service/autocomplete.go deleted file mode 100644 index ebf9b95a..00000000 --- a/api/internal/service/autocomplete.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type EmailAutocompleteService interface { - ListEmailAutocomplete(ctx context.Context, emailInput string, limit int) ([]model.EmailAutocompleteResult, error) -} - -type emailAutocompleteService struct { - db *model.Database - *model.Queries -} - -func NewEmailAutocompleteService(db *model.Database, q *model.Queries) *emailAutocompleteService { - return &emailAutocompleteService{db, q} -} diff --git a/api/internal/service/aware.go b/api/internal/service/aware.go index 28798558..cc89eae2 100644 --- a/api/internal/service/aware.go +++ b/api/internal/service/aware.go @@ -3,37 +3,25 @@ package service import ( "context" - 
"github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) -type AwareParameterService interface { - ListAwareParameters(ctx context.Context) ([]model.AwareParameter, error) - ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) +type AwarePlatformParameterConfig struct { + InstrumentID uuid.UUID `json:"instrument_id" db:"instrument_id"` + AwareID uuid.UUID `json:"aware_id" db:"aware_id"` + AwareParameters map[string]*uuid.UUID `json:"aware_parameters"` } -type awareParameterService struct { - db *model.Database - *model.Queries -} - -func NewAwareParameterService(db *model.Database, q *model.Queries) *awareParameterService { - return &awareParameterService{db, q} -} - -// ListAwarePlatformParameterConfig returns aware platform parameter configs -func (s awareParameterService) ListAwarePlatformParameterConfig(ctx context.Context) ([]model.AwarePlatformParameterConfig, error) { - aa := make([]model.AwarePlatformParameterConfig, 0) - ee, err := s.ListAwarePlatformParameterEnabled(ctx) +func (s DBService) AwarePlatformParameterConfigList(ctx context.Context) ([]AwarePlatformParameterConfig, error) { + aa := make([]AwarePlatformParameterConfig, 0) + ee, err := s.Queries.AwarePlatformParameterListEnabled(ctx) if err != nil { return aa, err } - // reorganize aware_parameter_key, timeseries_id into map for each instrument - // Map of aware parameters to timeseries - m1 := make(map[uuid.UUID]model.AwarePlatformParameterConfig) + m1 := make(map[uuid.UUID]AwarePlatformParameterConfig) for _, e := range ee { if _, ok := m1[e.InstrumentID]; !ok { - m1[e.InstrumentID] = model.AwarePlatformParameterConfig{ + m1[e.InstrumentID] = AwarePlatformParameterConfig{ InstrumentID: e.InstrumentID, AwareID: e.AwareID, AwareParameters: make(map[string]*uuid.UUID), diff --git a/api/internal/service/collection_group.go b/api/internal/service/collection_group.go index 784adab9..6aa0ea51 100644 --- 
a/api/internal/service/collection_group.go +++ b/api/internal/service/collection_group.go @@ -3,54 +3,26 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type CollectionGroupService interface { - ListCollectionGroups(ctx context.Context, projectID uuid.UUID) ([]model.CollectionGroup, error) - GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) - CreateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - UpdateCollectionGroup(ctx context.Context, cg model.CollectionGroup) (model.CollectionGroup, error) - DeleteCollectionGroup(ctx context.Context, projectID, collectionGroupID uuid.UUID) error - AddTimeseriesToCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error - RemoveTimeseriesFromCollectionGroup(ctx context.Context, collectionGroupID, timeseriesID uuid.UUID) error +func (s DBService) CollectionGroupCreate(ctx context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { + return s.Queries.CollectionGroupCreate(ctx, db.CollectionGroupCreateParams{ + ProjectID: cg.ProjectID, + Name: cg.Name, + CreatedBy: cg.CreatedBy, + CreatedAt: cg.CreatedAt, + SortOrder: cg.SortOrder, + }) } -type collectionGroupService struct { - db *model.Database - *model.Queries -} - -func NewCollectionGroupService(db *model.Database, q *model.Queries) *collectionGroupService { - return &collectionGroupService{db, q} -} - -// GetCollectionGroupDetails returns details for a single CollectionGroup -func (s collectionGroupService) GetCollectionGroupDetails(ctx context.Context, projectID, collectionGroupID uuid.UUID) (model.CollectionGroupDetails, error) { - var a model.CollectionGroupDetails - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - 
return a, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - cg, err := qtx.GetCollectionGroupDetails(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - ts, err := qtx.GetCollectionGroupDetailsTimeseries(ctx, projectID, collectionGroupID) - if err != nil { - return a, err - } - - if err := tx.Commit(); err != nil { - return a, err - } - - cg.Timeseries = ts - - return cg, nil +func (s DBService) CollectionGroupUpdate(ctx context.Context, cg dto.CollectionGroup) (db.CollectionGroup, error) { + return s.Queries.CollectionGroupUpdate(ctx, db.CollectionGroupUpdateParams{ + ID: cg.ID, + ProjectID: cg.ProjectID, + Name: cg.Name, + UpdatedBy: cg.UpdatedBy, + UpdatedAt: cg.UpdatedAt, + }) } diff --git a/api/internal/service/datalogger.go b/api/internal/service/datalogger.go index 5fc5c385..b826f2a8 100644 --- a/api/internal/service/datalogger.go +++ b/api/internal/service/datalogger.go @@ -2,155 +2,174 @@ package service import ( "context" + "errors" + "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/password" "github.com/google/uuid" ) -type DataloggerService interface { - GetDataloggerModelName(ctx context.Context, modelID uuid.UUID) (string, error) - ListProjectDataloggers(ctx context.Context, projectID uuid.UUID) ([]model.Datalogger, error) - ListAllDataloggers(ctx context.Context) ([]model.Datalogger, error) - GetDataloggerIsActive(ctx context.Context, modelName, sn string) (bool, error) - VerifyDataloggerExists(ctx context.Context, dlID uuid.UUID) error - CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) - CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) - GetOneDatalogger(ctx context.Context, dataloggerID uuid.UUID) (model.Datalogger, error) - 
UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) - DeleteDatalogger(ctx context.Context, d model.Datalogger) error - GetDataloggerTablePreview(ctx context.Context, dataloggerTableID uuid.UUID) (model.DataloggerTablePreview, error) - ResetDataloggerTableName(ctx context.Context, dataloggerTableID uuid.UUID) error - GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) - DeleteDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error +type DataloggerWithKey struct { + db.VDatalogger + Key string `json:"key"` } -type dataloggerService struct { - db *model.Database - *model.Queries -} - -func NewDataloggerService(db *model.Database, q *model.Queries) *dataloggerService { - return &dataloggerService{db, q} -} +func (s DBService) DataloggerCreate(ctx context.Context, n dto.Datalogger) (DataloggerWithKey, error) { + var a DataloggerWithKey -func (s dataloggerService) CreateDatalogger(ctx context.Context, n model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - - dataloggerID, err := qtx.CreateDatalogger(ctx, n) + dataloggerID, err := qtx.DataloggerCreate(ctx, db.DataloggerCreateParams{ + Name: n.Name, + Sn: n.SN, + ProjectID: n.ProjectID, + CreatedBy: n.CreatedBy, + ModelID: n.ModelID, + }) if err != nil { return a, err } - key, err := qtx.CreateDataloggerHash(ctx, dataloggerID) - if err != nil { + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) + + if err := qtx.DataloggerHashCreate(ctx, db.DataloggerHashCreateParams{ + DataloggerID: dataloggerID, + Hash: hash, + }); err != nil { return a, err } - - dl, err := qtx.GetOneDatalogger(ctx, dataloggerID) + dl, err := qtx.DataloggerGet(ctx, dataloggerID) if err != 
nil { return a, err } - - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } - return dk, nil } -func (s dataloggerService) CycleDataloggerKey(ctx context.Context, u model.Datalogger) (model.DataloggerWithKey, error) { - var a model.DataloggerWithKey - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerHashUpdate(ctx context.Context, arg dto.Datalogger) (DataloggerWithKey, error) { + var a DataloggerWithKey + if arg.UpdatedBy == nil { + return a, errors.New("must supply updater profile id") + } + + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) + key := password.GenerateRandom(40) + hash := password.MustCreateHash(key, password.DefaultParams) - key, err := qtx.UpdateDataloggerHash(ctx, u.ID) - if err != nil { + if err := qtx.DataloggerHashUpdate(ctx, db.DataloggerHashUpdateParams{ + DataloggerID: arg.ID, + Hash: hash, + }); err != nil { return a, err } - if err := qtx.UpdateDataloggerUpdater(ctx, u); err != nil { + t := time.Now() + if err := qtx.DataloggerUpdateAuditInfo(ctx, db.DataloggerUpdateAuditInfoParams{ + ID: arg.ID, + UpdatedBy: arg.UpdatedBy, + UpdatedAt: &t, + }); err != nil { return a, err } - dl, err := qtx.GetOneDatalogger(ctx, u.ID) + dl, err := qtx.DataloggerGet(ctx, arg.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - dk := model.DataloggerWithKey{ - Datalogger: dl, - Key: key, + dk := DataloggerWithKey{ + VDatalogger: dl, + Key: key, } return dk, nil } -func (s dataloggerService) UpdateDatalogger(ctx context.Context, u model.Datalogger) (model.Datalogger, error) { - var a model.Datalogger - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerUpdate(ctx context.Context, u 
dto.Datalogger) (db.VDatalogger, error) { + var a db.VDatalogger + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateDatalogger(ctx, u); err != nil { + if u.UpdatedBy == nil { + return a, errors.New("must set updater id") + } + + t := time.Now() + if err := qtx.DataloggerUpdate(ctx, db.DataloggerUpdateParams{ + ID: u.ID, + Name: u.Name, + UpdatedBy: u.UpdatedBy, + UpdatedAt: &t, + }); err != nil { return a, err } - dlUpdated, err := qtx.GetOneDatalogger(ctx, u.ID) + dlUpdated, err := qtx.DataloggerGet(ctx, u.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } return dlUpdated, nil } -func (s dataloggerTelemetryService) GetOrCreateDataloggerTable(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTableGetOrCreate(ctx context.Context, dataloggerID uuid.UUID, tableName string) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DataloggerTableUpdateNameIfEmpty(ctx, db.DataloggerTableUpdateNameIfEmptyParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - dataloggerTableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + dataloggerTableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }) if err != nil { return uuid.Nil, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return uuid.Nil, err } diff --git a/api/internal/service/datalogger_parser.go 
b/api/internal/service/datalogger_parser.go new file mode 100644 index 00000000..7387707d --- /dev/null +++ b/api/internal/service/datalogger_parser.go @@ -0,0 +1,96 @@ +package service + +import ( + "context" + "encoding/csv" + "io" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +// datalogger toa5/dat parser +func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + // skip units header + _, err = reader.Read() + if err != nil { + return err + } + // skip process header + _, err = reader.Read() + if err != nil { + return err + } + + meta := dto.Environment{ + // StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + // OSVersion: envHeader[4], + // ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) + if err != nil { + return err + } + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = fieldHeader[i] + } + + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) + if err != nil { + return err + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Fields { + if eqtRow.TimeseriesID == nil { + 
continue + } + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + timezone: "UTC", + }); err != nil { + return err + } + + return tx.Commit(ctx) +} diff --git a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index aee3e441..eef72bfd 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -3,79 +3,274 @@ package service import ( "context" "database/sql" + "encoding/csv" "errors" + "fmt" + "io" + "math" + "strconv" + "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type DataloggerTelemetryService interface { - GetDataloggerByModelSN(ctx context.Context, modelName, sn string) (model.Datalogger, error) - GetDataloggerHashByModelSN(ctx context.Context, modelName, sn string) (string, error) - CreateDataloggerTablePreview(ctx context.Context, prv model.DataloggerTablePreview) error - UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) - UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error -} - -type dataloggerTelemetryService struct { - db *model.Database - *model.Queries -} - -func NewDataloggerTelemetryService(db *model.Database, q *model.Queries) *dataloggerTelemetryService { - return &dataloggerTelemetryService{db, q} +func (s DBService) DataloggerTablePreviewCreate(ctx context.Context, prv dto.DataloggerTablePreview) error { + return s.Queries.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ + 
DataloggerTableID: prv.DataloggerTableID, + UpdatedAt: prv.UpdatedAt, + Preview: prv.Preview, + }) } // UpdateDataloggerTablePreview attempts to update a table preview by datalogger_id and table_name, creates the // datalogger table and corresponding preview if it doesn't exist -func (s dataloggerTelemetryService) UpdateDataloggerTablePreview(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv model.DataloggerTablePreview) (uuid.UUID, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerID uuid.UUID, tableName string, prv dto.DataloggerTablePreview) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) // replace empty datalogger table name with most recent payload - if err := qtx.RenameEmptyDataloggerTableName(ctx, dataloggerID, tableName); err != nil { + if err := qtx.DataloggerTableUpdateNameIfEmpty(ctx, db.DataloggerTableUpdateNameIfEmptyParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }); err != nil { return uuid.Nil, err } - tableID, err := qtx.GetOrCreateDataloggerTable(ctx, dataloggerID, tableName) + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dataloggerID, + TableName: tableName, + }) if err != nil { return uuid.Nil, err } - if err := qtx.UpdateDataloggerTablePreview(ctx, dataloggerID, tableName, prv); err != nil { + if err := qtx.DataloggerTablePreviewUpdate(ctx, db.DataloggerTablePreviewUpdateParams{ + DataloggerID: dataloggerID, + TableName: tableName, + Preview: prv.Preview, + UpdatedAt: prv.UpdatedAt, + }); err != nil { if !errors.Is(err, sql.ErrNoRows) { return uuid.Nil, err } prv.DataloggerTableID = tableID - if err := qtx.CreateDataloggerTablePreview(ctx, prv); err != nil { + if err := qtx.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ + 
DataloggerTableID: prv.DataloggerTableID, + Preview: prv.Preview, + UpdatedAt: prv.UpdatedAt, + }); err != nil { } } - return tableID, tx.Commit() + return tableID, tx.Commit(ctx) } -func (s dataloggerTelemetryService) UpdateDataloggerTableError(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *model.DataloggerError) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) DataloggerTableErrorUpdate(ctx context.Context, dataloggerID uuid.UUID, tableName *string, e *dto.DataloggerError) error { + if tableName == nil { + return errors.New("table name must not be nil") + } + + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.DataloggerErrorDelete(ctx, db.DataloggerErrorDeleteParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + }); err != nil { + return err + } + if len(e.Errors) == 0 { + return tx.Commit(ctx) + } + + ee := make([]db.DataloggerErrorCreateBatchParams, len(e.Errors)) + for idx, m := range e.Errors { + ee[idx] = db.DataloggerErrorCreateBatchParams{ + DataloggerID: dataloggerID, + TableName: *tableName, + ErrorMessage: &m, + } + } + qtx.DataloggerErrorCreateBatch(ctx, ee).Exec(batchExecErr(&err)) if err != nil { return err } - defer model.TxDo(tx.Rollback) + return tx.Commit(ctx) +} + +// ParseTOA5 parses a Campbell Scientific TOA5 data file that is simlar to a csv. 
+// The unique properties of TOA5 are that the meatdata are stored in header of file (first 4 lines of csv) +func (s DBService) TimeseriesMeasurementCreateOrUpdateDataloggerTOA5Upload(ctx context.Context, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.DeleteDataloggerTableError(ctx, dataloggerID, tableName); err != nil { + reader := csv.NewReader(r) + + envHeader, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + unitsHeader, err := reader.Read() + if err != nil { + return err + } + processHeader, err := reader.Read() + if err != nil { + return err + } + + meta := dto.Environment{ + StationName: envHeader[1], + Model: envHeader[2], + SerialNo: envHeader[3], + OSVersion: envHeader[4], + ProgName: envHeader[5], + TableName: envHeader[6], + } + + dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ + Model: &meta.Model, + Sn: meta.SerialNo, + }) + if err != nil { return err } - for _, m := range e.Errors { - if err := qtx.CreateDataloggerTableError(ctx, dataloggerID, tableName, m); err != nil { - return err + tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ + DataloggerID: dl.ID, + TableName: meta.TableName, + }) + if err != nil { + return err + } + + em := make([]string, 0) + defer func() { + s.DataloggerTableErrorUpdate(ctx, dl.ID, &meta.TableName, &dto.DataloggerError{Errors: em}) + }() + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]dto.Field, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i] = dto.Field{ + Name: fieldHeader[i], + Units: unitsHeader[i], + Process: processHeader[i], } } - return tx.Commit() + eqt, err := qtx.EquivalencyTableGet(ctx, tableID) + if err != nil { + return err + } + + 
fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, eqtRow := range eqt.Fields { + fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID + } + + chunkSize := 1_000 + mmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + noteParams := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mIdx, nIdx int + for { + record, err := reader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + t, err := time.Parse(record[0], time.RFC3339) + if err != nil { + return err + } + for idx, cell := range record[2:] { + fieldName := fields[idx].Name + tsID, ok := fieldNameTimeseriesIDMap[fieldName] + if !ok { + // key error, field_name does not exist for equivalency table + // add error to Measurement payload to report back to user + em = append(em, fmt.Sprintf( + "key error: field_name %s does not exist for equivalency table %s", + fieldName, meta.TableName, + )) + continue + } + v, err := strconv.ParseFloat(cell, 64) + if err != nil { + v = math.NaN() + } + if math.IsNaN(v) || math.IsInf(v, 0) { + em = append(em, fmt.Sprintf( + "warning: field_name %s contains invalid value entry at %s (NAN or INF)", + fieldName, t, + )) + masked := true + noteParams[nIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Masked: &masked, + } + nIdx++ + if nIdx == chunkSize { + var err error + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, noteParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } + } + mmtParams[mIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + } + mIdx++ + if mIdx == chunkSize { + var err error + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mIdx = 0 + } + } + } + if mIdx != 0 { + var err error + qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mmtParams[:mIdx]).Exec(batchExecErr(&err)) + if err != nil 
{ + return err + } + } + if nIdx != 0 { + var err error + qtx.TimeseriesNoteCreateOrUpdateBatch(ctx, noteParams[:nIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return tx.Commit(ctx) } diff --git a/api/internal/service/db.go b/api/internal/service/db.go new file mode 100644 index 00000000..0e904575 --- /dev/null +++ b/api/internal/service/db.go @@ -0,0 +1,83 @@ +package service + +import ( + "context" + "errors" + "fmt" + "log" + + "github.com/USACE/instrumentation-api/api/internal/config" + gen "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +type DBService struct { + db *DatabasePool + *gen.Queries +} + +type DatabasePool struct { + *pgxpool.Pool +} + +func NewDBService(cfg config.DBConfig) *DBService { + dbpool := newDatabasePool(cfg) + q := gen.New(dbpool) + return &DBService{dbpool, q} +} + +func newDatabasePool(cfg config.DBConfig) *DatabasePool { + config, err := pgxpool.ParseConfig(cfg.ConnStr()) + if err != nil { + log.Fatal(err.Error()) + } + pool, err := pgxpool.NewWithConfig(context.Background(), config) + if err != nil { + log.Fatal(err.Error()) + } + if err := pool.Ping(context.Background()); err != nil { + log.Fatal(err.Error()) + } + return &DatabasePool{pool} +} + +func txDo(ctx context.Context, rollback func(ctx context.Context) error) { + err := rollback(ctx) + if err != nil && !errors.Is(err, pgx.ErrTxClosed) { + log.Print(err.Error()) + } +} + +func batchExecErr(err *error) func(int, error) { + return func(_ int, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowErr[T any](err *error) func(int, T, error) { + return func(_ int, _ T, e error) { + if e != nil { + *err = e + return + } + } +} + +func batchQueryRowCollect[T any](rr []T, err *error) func(int, T, error) { + rrlen := len(rr) + return func(i int, r T, e error) { + if e != nil { + *err = e + return + } + if i == rrlen { + *err = fmt.Errorf("rr slice must be 
same length as QueryRow args") + return + } + rr[i] = r + } +} diff --git a/api/internal/service/dcsloader.go b/api/internal/service/dcsloader.go index f1fd6880..739f4357 100644 --- a/api/internal/service/dcsloader.go +++ b/api/internal/service/dcsloader.go @@ -13,14 +13,14 @@ import ( "time" "github.com/USACE/instrumentation-api/api/internal/config" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/util" "github.com/google/uuid" ) type DcsLoaderService interface { - ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) - PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error + ParseCsvMeasurementCollection(r io.Reader) ([]dto.MeasurementCollection, int, error) + PostMeasurementCollectionToApi(mcs []dto.MeasurementCollection) error } type dcsLoaderService struct { @@ -32,8 +32,8 @@ func NewDcsLoaderService(apiClient *http.Client, cfg *config.DcsLoaderConfig) *d return &dcsLoaderService{apiClient, cfg} } -func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.MeasurementCollection, int, error) { - mcs := make([]model.MeasurementCollection, 0) +func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]dto.MeasurementCollection, int, error) { + mcs := make([]dto.MeasurementCollection, 0) mCount := 0 reader := csv.NewReader(r) @@ -49,7 +49,7 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me rows = append(rows, row) } - mcMap := make(map[uuid.UUID]*model.MeasurementCollection) + mcMap := make(map[uuid.UUID]*dto.MeasurementCollection) for _, row := range rows { // 0=timeseries_id, 1=time, 2=value tsid, err := uuid.Parse(row[0]) @@ -66,16 +66,16 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me } if _, ok := mcMap[tsid]; !ok { - mcMap[tsid] = &model.MeasurementCollection{ + mcMap[tsid] = 
&dto.MeasurementCollection{ TimeseriesID: tsid, - Items: make([]model.Measurement, 0), + Items: make([]dto.Measurement, 0), } } - mcMap[tsid].Items = append(mcMap[tsid].Items, model.Measurement{TimeseriesID: tsid, Time: t, Value: model.FloatNanInf(v)}) + mcMap[tsid].Items = append(mcMap[tsid].Items, dto.Measurement{TimeseriesID: tsid, Time: t, Value: dto.FloatNanInf(v)}) mCount++ } - mcs = make([]model.MeasurementCollection, len(mcMap)) + mcs = make([]dto.MeasurementCollection, len(mcMap)) idx := 0 for _, v := range mcMap { mcs[idx] = *v @@ -85,7 +85,7 @@ func (s dcsLoaderService) ParseCsvMeasurementCollection(r io.Reader) ([]model.Me return mcs, mCount, nil } -func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []model.MeasurementCollection) error { +func (s dcsLoaderService) PostMeasurementCollectionToApi(mcs []dto.MeasurementCollection) error { requestBodyBytes, err := json.Marshal(mcs) if err != nil { return err diff --git a/api/internal/service/district_rollup.go b/api/internal/service/district_rollup.go deleted file mode 100644 index cecf29f7..00000000 --- a/api/internal/service/district_rollup.go +++ /dev/null @@ -1,22 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type DistrictRollupService interface { - ListEvaluationDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) - ListMeasurementDistrictRollup(ctx context.Context, opID uuid.UUID, tw model.TimeWindow) ([]model.DistrictRollup, error) -} - -type districtRollupService struct { - db *model.Database - *model.Queries -} - -func NewDistrictRollupService(db *model.Database, q *model.Queries) *districtRollupService { - return &districtRollupService{db, q} -} diff --git a/api/internal/service/domain.go b/api/internal/service/domain.go index 9f7fc6e5..5aacb298 100644 --- a/api/internal/service/domain.go +++ b/api/internal/service/domain.go @@ -3,19 +3,19 
@@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" ) -type DomainService interface { - GetDomains(ctx context.Context) ([]model.Domain, error) - GetDomainMap(ctx context.Context) (model.DomainMap, error) -} - -type domainService struct { - db *model.Database - *model.Queries -} +type DomainMap map[string][]db.DomainGroupOpt -func NewDomainService(db *model.Database, q *model.Queries) *domainService { - return &domainService{db, q} +func (s DBService) DomainMapGet(ctx context.Context) (DomainMap, error) { + a := make(DomainMap) + gg, err := s.Queries.DomainGroupList(ctx) + if err != nil { + return a, err + } + for _, g := range gg { + a[g.Group] = g.Opts + } + return a, nil } diff --git a/api/internal/service/equivalency_table.go b/api/internal/service/equivalency_table.go index 3ed47a9d..71942882 100644 --- a/api/internal/service/equivalency_table.go +++ b/api/internal/service/equivalency_table.go @@ -2,88 +2,89 @@ package service import ( "context" + "errors" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type EquivalencyTableService interface { - GetEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) (model.EquivalencyTable, error) - CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) - DeleteEquivalencyTable(ctx context.Context, dataloggerTableID uuid.UUID) error - DeleteEquivalencyTableRow(ctx context.Context, rowID uuid.UUID) error - GetIsValidDataloggerTable(ctx context.Context, dataloggerTableID uuid.UUID) error -} - -type equivalencyTableService struct { - db *model.Database - *model.Queries -} - -func 
NewEquivalencyTableService(db *model.Database, q *model.Queries) *equivalencyTableService { - return &equivalencyTableService{db, q} -} +func (s DBService) EquivalencyTableCreateOrUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable -// CreateEquivalencyTable creates EquivalencyTable rows -// If a row with the given datalogger id or field name already exists the row will be ignored -func (s equivalencyTableService) CreateOrUpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.CreateOrUpdateEquivalencyTableRow(ctx, t.DataloggerID, t.DataloggerTableID, r); err != nil { - return model.EquivalencyTable{}, err + if err := qtx.EquivalencyTableCreateOrUpdate(ctx, db.EquivalencyTableCreateOrUpdateParams{ + DataloggerID: t.DataloggerID, + DataloggerTableID: &t.DataloggerTableID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + InstrumentID: r.InstrumentID, + TimeseriesID: r.TimeseriesID, + }); err != nil { + return a, err } } - eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := tx.Commit(ctx); err 
!= nil { + return a, err } return eqt, nil } -// UpdateEquivalencyTable updates rows of an EquivalencyTable -func (s equivalencyTableService) UpdateEquivalencyTable(ctx context.Context, t model.EquivalencyTable) (model.EquivalencyTable, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EquivalencyTableUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { + var a db.VDataloggerEquivalencyTable + tx, err := s.db.Begin(ctx) if err != nil { - return model.EquivalencyTable{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) for _, r := range t.Rows { if r.TimeseriesID != nil { - if err = qtx.GetIsValidEquivalencyTableTimeseries(ctx, *r.TimeseriesID); err != nil { - return model.EquivalencyTable{}, err + valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) + if err != nil { + return a, err + } + if !valid { + return a, errors.New("equivalency table timeseries invalid") } } - if err := qtx.UpdateEquivalencyTableRow(ctx, r); err != nil { - return model.EquivalencyTable{}, err + if err := qtx.EquivalencyTableUpdate(ctx, db.EquivalencyTableUpdateParams{ + ID: r.ID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + }); err != nil { + return a, err } } - eqt, err := qtx.GetEquivalencyTable(ctx, t.DataloggerTableID) + eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) - if err := tx.Commit(); err != nil { - return model.EquivalencyTable{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } return eqt, nil diff --git a/api/internal/service/evaluation.go b/api/internal/service/evaluation.go index 2525a36a..83782d0f 100644 --- a/api/internal/service/evaluation.go +++ b/api/internal/service/evaluation.go @@ -3,150 +3,149 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + 
"github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type EvaluationService interface { - ListProjectEvaluations(ctx context.Context, projectID uuid.UUID) ([]model.Evaluation, error) - ListProjectEvaluationsByAlertConfig(ctx context.Context, projectID, alertConfigID uuid.UUID) ([]model.Evaluation, error) - ListInstrumentEvaluations(ctx context.Context, instrumentID uuid.UUID) ([]model.Evaluation, error) - GetEvaluation(ctx context.Context, evaluationID uuid.UUID) (model.Evaluation, error) - RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error - CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) - UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) - DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error -} - -type evaluationService struct { - db *model.Database - *model.Queries -} - -func NewEvaluationService(db *model.Database, q *model.Queries) *evaluationService { - return &evaluationService{db, q} -} - -func (s evaluationService) RecordEvaluationSubmittal(ctx context.Context, subID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationSubmittalUpdateCompleteCreateNext(ctx context.Context, subID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - sub, err := qtx.CompleteEvaluationSubmittal(ctx, subID) + sub, err := qtx.SubmittalUpdateCompleteEvaluation(ctx, subID) if err != nil { return err } - // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { - if err := qtx.CreateNextEvaluationSubmittal(ctx, subID); err != nil { + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == dto.GreenSubmittalStatusID { + if err := qtx.SubmittalCreateNextEvaluation(ctx, 
subID); err != nil { return err } } - return tx.Commit() + return tx.Commit(ctx) } -func (s evaluationService) CreateEvaluation(ctx context.Context, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationCreate(ctx context.Context, ev dto.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) if ev.SubmittalID != nil { - sub, err := qtx.CompleteEvaluationSubmittal(ctx, *ev.SubmittalID) + sub, err := qtx.SubmittalUpdateCompleteEvaluation(ctx, *ev.SubmittalID) if err != nil { return a, err } // Create next submittal if submitted on-time // late submittals will have already generated next submittal - if sub.SubmittalStatusID == GreenSubmittalStatusID { - qtx.CreateNextEvaluationSubmittal(ctx, *ev.SubmittalID) + if sub.SubmittalStatusID != nil && *sub.SubmittalStatusID == dto.GreenSubmittalStatusID { + qtx.SubmittalCreateNextEvaluation(ctx, *ev.SubmittalID) } } - - evID, err := qtx.CreateEvaluation(ctx, ev) + evID, err := qtx.EvaluationCreate(ctx, db.EvaluationCreateParams{ + ProjectID: ev.ProjectID, + SubmittalID: ev.SubmittalID, + Name: ev.Name, + Body: ev.Body, + StartedAt: ev.StartedAt, + EndedAt: ev.EndedAt, + CreatedBy: ev.CreatedBy, + CreatedAt: ev.CreatedAt, + }) if err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, evID, aci.InstrumentID); err != nil { - return a, err + args := make([]db.EvaluationInstrumentCreateBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.EvaluationInstrumentCreateBatchParams{ + EvaluationID: &evID, + InstrumentID: &aci.InstrumentID, } } - - evNew, err := qtx.GetEvaluation(ctx, evID) + qtx.EvaluationInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { return a, err } - - if 
err := tx.Commit(); err != nil { + a, err = qtx.EvaluationGet(ctx, evID) + if err != nil { + return a, err + } + if err := tx.Commit(ctx); err != nil { return a, err } - return evNew, nil + return a, nil } -func (s evaluationService) UpdateEvaluation(ctx context.Context, evaluationID uuid.UUID, ev model.Evaluation) (model.Evaluation, error) { - var a model.Evaluation - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationUpdate(ctx context.Context, evaluationID uuid.UUID, ev dto.Evaluation) (db.VEvaluation, error) { + var a db.VEvaluation + tx, err := s.db.Begin(ctx) if err != nil { return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateEvaluation(ctx, ev); err != nil { + if err := qtx.EvaluationUpdate(ctx, db.EvaluationUpdateParams{ + ID: ev.ID, + ProjectID: ev.ProjectID, + Name: ev.Name, + Body: ev.Body, + StartedAt: ev.StartedAt, + EndedAt: ev.EndedAt, + UpdatedBy: ev.UpdatedBy, + UpdatedAt: ev.UpdatedAt, + }); err != nil { return a, err } - - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, ev.ID); err != nil { + if err := qtx.EvaluationInstrumentDeleteForEvaluation(ctx, &ev.ID); err != nil { return a, err } - - for _, aci := range ev.Instruments { - if err := qtx.CreateEvaluationInstrument(ctx, ev.ID, aci.InstrumentID); err != nil { - return a, err + args := make([]db.EvaluationInstrumentCreateBatchParams, len(ev.Instruments)) + for idx, aci := range ev.Instruments { + args[idx] = db.EvaluationInstrumentCreateBatchParams{ + EvaluationID: &evaluationID, + InstrumentID: &aci.InstrumentID, } } + qtx.EvaluationInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err + } - evUpdated, err := qtx.GetEvaluation(ctx, ev.ID) + a, err = qtx.EvaluationGet(ctx, ev.ID) if err != nil { return a, err } - if err := tx.Commit(); err != nil { + if err := tx.Commit(ctx); err != nil { return a, err } - return evUpdated, nil + return a, nil } -func (s 
evaluationService) DeleteEvaluation(ctx context.Context, evaluationID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) EvaluationDelete(ctx context.Context, evaluationID uuid.UUID) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UnassignAllInstrumentsFromEvaluation(ctx, evaluationID); err != nil { + if err := qtx.EvaluationInstrumentDeleteForEvaluation(ctx, &evaluationID); err != nil { return err } - - if err := qtx.DeleteEvaluation(ctx, evaluationID); err != nil { + if err := qtx.EvaluationDelete(ctx, evaluationID); err != nil { return err } - return nil + return tx.Commit(ctx) } diff --git a/api/internal/service/heartbeat.go b/api/internal/service/heartbeat.go index 16fb78ca..025d8423 100644 --- a/api/internal/service/heartbeat.go +++ b/api/internal/service/heartbeat.go @@ -2,21 +2,41 @@ package service import ( "context" - - "github.com/USACE/instrumentation-api/api/internal/model" + "time" ) -type HeartbeatService interface { - DoHeartbeat(ctx context.Context) (model.Heartbeat, error) - GetLatestHeartbeat(ctx context.Context) (model.Heartbeat, error) - ListHeartbeats(ctx context.Context) ([]model.Heartbeat, error) +type Healthcheck struct { + Status string +} + +type Heartbeat struct { + Time time.Time +} + +func (s DBService) HeartbeatCreate(ctx context.Context, argTime time.Time) (Heartbeat, error) { + hb, err := s.Queries.HeartbeatCreate(ctx, argTime) + if err != nil { + return Heartbeat{}, err + } + return Heartbeat{hb}, nil } -type heartbeatService struct { - db *model.Database - *model.Queries +func (s DBService) HeartbeatGetLatest(ctx context.Context) (Heartbeat, error) { + hb, err := s.Queries.HeartbeatGetLatest(ctx) + if err != nil { + return Heartbeat{}, err + } + return Heartbeat{hb}, nil } -func NewHeartbeatService(db *model.Database, q *model.Queries) *heartbeatService { - return &heartbeatService{db, q} 
+func (s DBService) HeartbeatList(ctx context.Context, resultLimit int32) ([]Heartbeat, error) { + hh, err := s.Queries.HeartbeatList(ctx, resultLimit) + if err != nil { + return nil, err + } + rr := make([]Heartbeat, len(hh)) + for idx := range hh { + rr[idx] = Heartbeat{hh[idx]} + } + return rr, nil } diff --git a/api/internal/service/home.go b/api/internal/service/home.go deleted file mode 100644 index dc9f849a..00000000 --- a/api/internal/service/home.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type HomeService interface { - GetHome(ctx context.Context) (model.Home, error) -} - -type homeService struct { - db *model.Database - *model.Queries -} - -func NewHomeService(db *model.Database, q *model.Queries) *homeService { - return &homeService{db, q} -} diff --git a/api/internal/service/instrument.go b/api/internal/service/instrument.go index 73d0a734..c9446e54 100644 --- a/api/internal/service/instrument.go +++ b/api/internal/service/instrument.go @@ -2,35 +2,17 @@ package service import ( "context" + "slices" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" - "github.com/paulmach/orb/geojson" ) -type InstrumentService interface { - ListInstruments(ctx context.Context) ([]model.Instrument, error) - GetInstrument(ctx context.Context, instrumentID uuid.UUID) (model.Instrument, error) - GetInstrumentCount(ctx context.Context) (model.InstrumentCount, error) - CreateInstrument(ctx context.Context, i model.Instrument) (model.IDSlugName, error) - CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) - UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) - UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID 
uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) - DeleteFlagInstrument(ctx context.Context, projectID, instrumentID uuid.UUID) error -} - -type instrumentService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentService(db *model.Database, q *model.Queries) *instrumentService { - return &instrumentService{db, q} -} - var ( - saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") - ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") + saaTypeID = uuid.MustParse("07b91c5c-c1c5-428d-8bb9-e4c93ab2b9b9") + ipiTypeID = uuid.MustParse("c81f3a5d-fc5f-47fd-b545-401fe6ee63bb") + inclTypeID = uuid.MustParse("3c3dfc23-ed2a-4a4a-9ce0-683c7c1d4d20") ) type requestType int @@ -40,128 +22,204 @@ const ( update ) -func createInstrument(ctx context.Context, q *model.Queries, instrument model.Instrument) (model.IDSlugName, error) { - newInstrument, err := q.CreateInstrument(ctx, instrument) +func (s DBService) InstrumentCreateBatch(ctx context.Context, ii []dto.Instrument) ([]db.InstrumentCreateBatchRow, error) { + tx, err := s.db.Begin(ctx) if err != nil { - return model.IDSlugName{}, err + return nil, err } - for _, prj := range instrument.Projects { - if err := q.AssignInstrumentToProject(ctx, prj.ID, newInstrument.ID); err != nil { - return model.IDSlugName{}, err + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + createInstrumentsArgs := make([]db.InstrumentCreateBatchParams, len(ii)) + assignInstrumentsProjectsArgs := make([][]db.ProjectInstrumentCreateBatchParams, len(ii)) + instrumentStatusArgs := make([]db.InstrumentStatusCreateOrUpdateBatchParams, len(ii)) + instrumentAwareArgs := make([]db.AwarePlatformCreateBatchParams, 0) + + for idx, inst := range ii { + createInstrumentsArgs[idx] = db.InstrumentCreateBatchParams{ + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + Station: inst.Station, + StationOffset: inst.StationOffset, + CreatedBy: inst.CreatedBy, + CreatedAt: 
inst.CreatedAt, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, } } - if err := q.CreateOrUpdateInstrumentStatus(ctx, newInstrument.ID, instrument.StatusID, instrument.StatusTime); err != nil { - return model.IDSlugName{}, err - } - if instrument.AwareID != nil { - if err := q.CreateAwarePlatform(ctx, newInstrument.ID, *instrument.AwareID); err != nil { - return model.IDSlugName{}, err + newInstruments := make([]db.InstrumentCreateBatchRow, len(createInstrumentsArgs)) + qtx.InstrumentCreateBatch(ctx, createInstrumentsArgs).QueryRow(func(idx int, r db.InstrumentCreateBatchRow, e error) { + if e != nil { + err = e + return } - } - instrument.ID = newInstrument.ID - if err := handleOpts(ctx, q, instrument, create); err != nil { - return model.IDSlugName{}, err - } - return newInstrument, nil -} - -func (s instrumentService) CreateInstrument(ctx context.Context, instrument model.Instrument) (model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) + assignInstrumentsProjectsArgs[idx] = make([]db.ProjectInstrumentCreateBatchParams, len(ii[idx].Projects)) + for j, p := range ii[idx].Projects { + assignInstrumentsProjectsArgs[idx][j] = db.ProjectInstrumentCreateBatchParams{ + InstrumentID: r.ID, + ProjectID: p.ID, + } + } + instrumentStatusArgs[idx] = db.InstrumentStatusCreateOrUpdateBatchParams{ + InstrumentID: r.ID, + StatusID: ii[idx].StatusID, + Time: ii[idx].StatusTime, + } + if ii[idx].AwareID != nil { + instrumentAwareArgs = append(instrumentAwareArgs, db.AwarePlatformCreateBatchParams{ + InstrumentID: &r.ID, + AwareID: *ii[idx].AwareID, + }) + } + newInstruments[idx] = r + ii[idx].ID = r.ID + }) if err != nil { - return model.IDSlugName{}, err + return nil, err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - newInstrument, err := createInstrument(ctx, qtx, instrument) + qtx.ProjectInstrumentCreateBatch(ctx, slices.Concat(assignInstrumentsProjectsArgs...)).Exec(batchExecErr(&err)) if err != nil { - return 
model.IDSlugName{}, err + return nil, err } - - if err := tx.Commit(); err != nil { - return model.IDSlugName{}, err + qtx.InstrumentStatusCreateOrUpdateBatch(ctx, instrumentStatusArgs).Exec(batchExecErr(&err)) + if err != nil { + return nil, err } - return newInstrument, nil -} - -func (s instrumentService) CreateInstruments(ctx context.Context, instruments []model.Instrument) ([]model.IDSlugName, error) { - tx, err := s.db.BeginTxx(ctx, nil) + qtx.AwarePlatformCreateBatch(ctx, instrumentAwareArgs).Exec(batchExecErr(&err)) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - ii := make([]model.IDSlugName, len(instruments)) - for idx, i := range instruments { - newInstrument, err := createInstrument(ctx, qtx, i) - if err != nil { - return nil, err - } - ii[idx] = newInstrument + if err := handleOptsBatch(ctx, qtx, ii, create); err != nil { + return nil, err } - if err := tx.Commit(); err != nil { + + if err := tx.Commit(ctx); err != nil { return nil, err } - return ii, nil + + return newInstruments, nil } -// UpdateInstrument updates a single instrument -func (s instrumentService) UpdateInstrument(ctx context.Context, projectID uuid.UUID, i model.Instrument) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentUpdate(ctx context.Context, projectID uuid.UUID, inst dto.Instrument) (db.VInstrument, error) { + var a db.VInstrument + tx, err := s.db.Begin(ctx) if err != nil { - return model.Instrument{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrument(ctx, projectID, i); err != nil { - return model.Instrument{}, err - } - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, i.ID, i.StatusID, i.StatusTime); err != nil { - return model.Instrument{}, err - } - - if err := handleOpts(ctx, qtx, i, update); err != nil { - return model.Instrument{}, err - } - - aa, err := qtx.GetInstrument(ctx, i.ID) + if 
err := qtx.InstrumentUpdate(ctx, db.InstrumentUpdateParams{ + ProjectID: projectID, + ID: inst.ID, + Name: inst.Name, + TypeID: inst.TypeID, + Geometry: inst.Geometry, + UpdatedBy: inst.UpdatedBy, + UpdatedAt: inst.UpdatedAt, + Station: inst.Station, + StationOffset: inst.StationOffset, + NidID: inst.NIDID, + UsgsID: inst.USGSID, + ShowCwmsTab: inst.ShowCwmsTab, + }); err != nil { + return a, err + } + if err := qtx.InstrumentStatusCreateOrUpdate(ctx, db.InstrumentStatusCreateOrUpdateParams{ + InstrumentID: inst.ID, + StatusID: inst.StatusID, + Time: inst.StatusTime, + }); err != nil { + return a, err + } + if err := handleOptsBatch(ctx, qtx, []dto.Instrument{inst}, update); err != nil { + return a, err + } + a, err = qtx.InstrumentGet(ctx, inst.ID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return aa, nil + return a, nil } -func (s instrumentService) UpdateInstrumentGeometry(ctx context.Context, projectID, instrumentID uuid.UUID, geom geojson.Geometry, p model.Profile) (model.Instrument, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentUpdateGeometry(ctx context.Context, arg db.InstrumentUpdateGeometryParams) (db.VInstrument, error) { + var a db.VInstrument + tx, err := s.db.Begin(ctx) if err != nil { - return model.Instrument{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateInstrumentGeometry(ctx, projectID, instrumentID, geom, p); err != nil { - return model.Instrument{}, err + if _, err := qtx.InstrumentUpdateGeometry(ctx, arg); err != nil { + return a, err } - - aa, err := qtx.GetInstrument(ctx, instrumentID) + a, err = qtx.InstrumentGet(ctx, arg.ID) if err != nil { - return model.Instrument{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.Instrument{}, err + if err 
:= tx.Commit(ctx); err != nil { + return a, err } + return a, nil +} - return aa, nil +func handleOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument, rt requestType) error { + saa := make([]dto.Instrument, 0) + ipi := make([]dto.Instrument, 0) + incl := make([]dto.Instrument, 0) + for _, inst := range ii { + switch inst.TypeID { + case saaTypeID: + saa = append(saa, inst) + case ipiTypeID: + ipi = append(ipi, inst) + case inclTypeID: + incl = append(incl, inst) + default: + } + } + if len(saa) != 0 { + var err error + switch rt { + case create: + err = createSaaOptsBatch(ctx, q, saa) + case update: + err = updateSaaOptsBatch(ctx, q, saa) + } + if err != nil { + return err + } + } + if len(ipi) != 0 { + var err error + switch rt { + case create: + err = createIpiOptsBatch(ctx, q, ipi) + case update: + err = updateIpiOptsBatch(ctx, q, ipi) + } + if err != nil { + return err + } + } + if len(incl) != 0 { + var err error + switch rt { + case create: + err = createInclOptsBatch(ctx, q, incl) + case update: + err = updateInclOptsBatch(ctx, q, incl) + } + if err != nil { + return err + } + } + return nil } diff --git a/api/internal/service/instrument_assign.go b/api/internal/service/instrument_assign.go index d5dc912b..f0662842 100644 --- a/api/internal/service/instrument_assign.go +++ b/api/internal/service/instrument_assign.go @@ -4,180 +4,322 @@ import ( "context" "fmt" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" "github.com/google/uuid" ) -type InstrumentAssignService interface { - AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, 
instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) - ValidateInstrumentNamesProjectUnique(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (model.InstrumentsValidation, error) - ValidateProjectsInstrumentNameUnique(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) -} +type ReasonCode int -type instrumentAssignService struct { - db *model.Database - *model.Queries -} +const ( + None ReasonCode = iota + Unauthorized + InvalidName + InvalidUnassign +) -func NewInstrumentAssignService(db *model.Database, q *model.Queries) *instrumentAssignService { - return &instrumentAssignService{db, q} +type InstrumentsValidation struct { + ReasonCode ReasonCode `json:"-"` + IsValid bool `json:"is_valid"` + Errors []string `json:"errors"` } -func validateAssignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID uuid.UUID, instrument model.Instrument, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrument.ID, projectIDs) - if err != nil || !v.IsValid { +func (s DBService) ProjectInstrumentCreateBatchAssignmentProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateProjectsInstrumentNameUnique(ctx, instrument.Name, projectIDs) + return v, tx.Commit(ctx) } -func validateAssignInstrumentsToProject(ctx context.Context, q 
*model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - iIDNames, err := q.ListInstrumentIDNamesByIDs(ctx, instrumentIDs) +func (s DBService) ProjectInstrumentDeleteBatchAssignmentProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - iIDs := make([]uuid.UUID, len(iIDNames)) - iNames := make([]string, len(iIDNames)) - for idx := range iIDNames { - iIDs[idx] = iIDNames[idx].ID - iNames[idx] = iIDNames[idx].Name - } - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, iIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - return q.ValidateInstrumentNamesProjectUnique(ctx, projectID, iNames) + return v, tx.Commit(ctx) } -func assignProjectsToInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - instrument, err := q.GetInstrument(ctx, instrumentID) +func (s DBService) ProjectInstrumentCreateBatchAssignmentInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) - if err != nil || !v.IsValid { + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { return 
v, err } - for _, pID := range projectIDs { - if err := q.AssignInstrumentToProject(ctx, pID, instrumentID); err != nil { - return model.InstrumentsValidation{}, err - } - } - return v, nil + return v, tx.Commit(ctx) } -func unassignProjectsFromInstrument(ctx context.Context, q *model.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateProjectsAssignerAuthorized(ctx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid { +func (s DBService) ProjectInstrumentDeleteBatchAssignmentInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (InstrumentsValidation, error) { + var a InstrumentsValidation + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) + if err != nil || !v.IsValid || dryRun { return v, err } - for _, pID := range projectIDs { - if err := q.UnassignInstrumentFromProject(ctx, pID, instrumentID); err != nil { - return v, err + return v, tx.Commit(ctx) +} + +func (s DBService) ProjectInstrumentGetInstrumentNamesUniqueForProject(ctx context.Context, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { + return validateInstrumentNamesUniqueForProject(ctx, s.Queries, projectID, instrumentNames) +} + +func (s DBService) ProjectInstrumentGetInstrumentNameUniqueForProjects(ctx context.Context, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + return validateProjectsInstrumentNameUnique(ctx, s.Queries, instrumentName, projectIDs) +} + +func assignInstrumentsToProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateAssignInstrumentsToProject(ctx, q, profileID, 
projectID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.ProjectInstrumentCreateBatchParams, len(instrumentIDs)) + for idx := range instrumentIDs { + args[idx] = db.ProjectInstrumentCreateBatchParams{ + ProjectID: projectID, + InstrumentID: instrumentIDs[idx], } } - return v, nil + q.ProjectInstrumentCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err + } + return a, nil } -func assignInstrumentsToProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := validateAssignInstrumentsToProject(ctx, q, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err +func validateInstrumentNamesUniqueForProject(ctx context.Context, q *db.Queries, projectID uuid.UUID, instrumentNames []string) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForProjectInstrumentNames(ctx, db.ProjectInstrumentListForProjectInstrumentNamesParams{ + ProjectID: projectID, + InstrumentNames: instrumentNames, + }) + if err != nil { + return a, err } - for _, iID := range instrumentIDs { - if err := q.AssignInstrumentToProject(ctx, projectID, iID); err != nil { - return v, err + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. 
Instrument names must be unique within associated projects", + nn[idx], + ) } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func unassignInstrumentsFromProject(ctx context.Context, q *model.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (model.InstrumentsValidation, error) { - v, err := q.ValidateInstrumentsAssignerAuthorized(ctx, profileID, instrumentIDs) - if err != nil || !v.IsValid { - return v, err - } - cc, err := q.GetProjectCountForInstruments(ctx, instrumentIDs) +func validateProjectsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentProjectsProfileAdmin(ctx, db.ProjectInstrumentListForInstrumentProjectsProfileAdminParams{ + InstrumentID: instrumentID, + ProjectIds: projectIDs, + ProfileID: profileID, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - - for _, count := range cc { - if count.ProjectCount < 1 { - // invalid instrument, skipping - continue - } - if count.ProjectCount == 1 { - v.IsValid = false - v.ReasonCode = model.InvalidUnassign - v.Errors = append(v.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument to project '%s' because the user is not an ADMIN of this project", + nn[idx], + ) } - if err := q.UnassignInstrumentFromProject(ctx, projectID, count.InstrumentID); err != nil { - return v, err + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) + } + return a, err +} + +func 
validateInstrumentsAssignerAuthorized(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentsProfileAdmin(ctx, db.ProjectInstrumentListForInstrumentsProfileAdminParams{ + InstrumentIds: instrumentIDs, + ProfileID: profileID, + }) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Cannot assign instrument '%s' because is assigned to another project '%s' which the user is not an ADMIN of", + nn[idx].InstrumentName, nn[idx].ProjectName, + ) } + a.Errors = vErrors + a.ReasonCode = Unauthorized + } else { + a.IsValid = true + a.Errors = make([]string, 0) } - return v, nil + return a, err } -func (s instrumentAssignService) AssignProjectsToInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func validateProjectsInstrumentNameUnique(ctx context.Context, q *db.Queries, instrumentName string, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + nn, err := q.ProjectInstrumentListForInstrumentNameProjects(ctx, db.ProjectInstrumentListForInstrumentNameProjectsParams{ + InstrumentName: instrumentName, + ProjectIds: projectIDs, + }) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + if len(nn) != 0 { + vErrors := make([]string, len(nn)) + for idx := range nn { + vErrors[idx] = fmt.Sprintf( + "Instrument name '%s' is already taken. 
Instrument names must be unique within associated projects", + nn[idx], + ) + } + a.Errors = vErrors + a.ReasonCode = InvalidName + } else { + a.IsValid = true + a.Errors = make([]string, 0) + } + return a, err +} - v, err := assignProjectsToInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func validateAssignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID uuid.UUID, instrument db.VInstrument, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + v, err := validateProjectsAssignerAuthorized(ctx, q, profileID, instrument.ID, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + return validateProjectsInstrumentNameUnique(ctx, q, instrument.Name, projectIDs) } -func (s instrumentAssignService) UnassignProjectsFromInstrument(ctx context.Context, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignProjectsFromInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateProjectsAssignerAuthorized(ctx, q, profileID, instrumentID, projectIDs) + if err != nil || !a.IsValid { + return a, err + } + args := make([]db.ProjectInstrumentDeleteBatchParams, len(projectIDs)) + for idx := range projectIDs { + args[idx] = db.ProjectInstrumentDeleteBatchParams{ + ProjectID: projectIDs[idx], + InstrumentID: instrumentID, + } + } + q.ProjectInstrumentDeleteBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) + return a, nil +} - v, err := unassignProjectsFromInstrument(ctx, qtx, profileID, instrumentID, projectIDs) - if err != nil || !v.IsValid || dryRun { +func validateAssignInstrumentsToProject(ctx 
context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + iIDNames, err := q.InstrumentIDNameListByIDs(ctx, instrumentIDs) + if err != nil { + return a, err + } + iIDs := make([]uuid.UUID, len(iIDNames)) + iNames := make([]string, len(iIDNames)) + for idx := range iIDNames { + iIDs[idx] = iIDNames[idx].ID + iNames[idx] = iIDNames[idx].Name + } + v, err := validateInstrumentsAssignerAuthorized(ctx, q, profileID, iIDs) + if err != nil { return v, err } - return v, tx.Commit() + if !v.IsValid { + return v, nil + } + return validateInstrumentNamesUniqueForProject(ctx, q, projectID, iNames) } -func (s instrumentAssignService) AssignInstrumentsToProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) (model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func assignProjectsToInstrument(ctx context.Context, q *db.Queries, profileID, instrumentID uuid.UUID, projectIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + instrument, err := q.InstrumentGet(ctx, instrumentID) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - v, err := assignInstrumentsToProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { + v, err := validateAssignProjectsToInstrument(ctx, q, profileID, instrument, projectIDs) + if err != nil || !v.IsValid { return v, err } - return v, tx.Commit() + for _, pID := range projectIDs { + if err := q.ProjectInstrumentCreate(ctx, db.ProjectInstrumentCreateParams{ + ProjectID: pID, + InstrumentID: instrumentID, + }); err != nil { + return a, err + } + } + return v, nil } -func (s instrumentAssignService) UnassignInstrumentsFromProject(ctx context.Context, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID, dryRun bool) 
(model.InstrumentsValidation, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func unassignInstrumentsFromProject(ctx context.Context, q *db.Queries, profileID, projectID uuid.UUID, instrumentIDs []uuid.UUID) (InstrumentsValidation, error) { + var a InstrumentsValidation + var err error + a, err = validateInstrumentsAssignerAuthorized(ctx, q, profileID, instrumentIDs) + if err != nil || !a.IsValid { + return a, err + } + cc, err := q.ProjectInstrumentListCountByInstrument(ctx, instrumentIDs) if err != nil { - return model.InstrumentsValidation{}, err + return a, err } - defer model.TxDo(tx.Rollback) - qtx := s.WithTx(tx) - - v, err := unassignInstrumentsFromProject(ctx, qtx, profileID, projectID, instrumentIDs) - if err != nil || !v.IsValid || dryRun { - return v, err + args := make([]db.ProjectInstrumentDeleteBatchParams, 0) + for _, count := range cc { + if count.ProjectCount < 1 { + // invalid instrument, skipping + continue + } + if count.ProjectCount == 1 { + a.IsValid = false + a.ReasonCode = InvalidUnassign + a.Errors = append(a.Errors, fmt.Sprintf("cannot unassign instruments from project, all instruments must have at least one project assinment (%s is only assign to this project)", count.InstrumentName)) + } + args = append(args, db.ProjectInstrumentDeleteBatchParams{ + ProjectID: projectID, + InstrumentID: count.InstrumentID, + }) + } + q.ProjectInstrumentDeleteBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err } - return v, tx.Commit() + return a, nil } diff --git a/api/internal/service/instrument_constant.go b/api/internal/service/instrument_constant.go index cf500988..3b98947d 100644 --- a/api/internal/service/instrument_constant.go +++ b/api/internal/service/instrument_constant.go @@ -2,73 +2,55 @@ package service import ( "context" + "errors" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + 
"github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentConstantService interface { - ListInstrumentConstants(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - CreateInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error - CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error -} - -type instrumentConstantService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentConstantService(db *model.Database, q *model.Queries) *instrumentConstantService { - return &instrumentConstantService{db, q} -} - -// CreateInstrumentConstants creates many instrument constants from an array of instrument constants -// An InstrumentConstant is structurally the same as a timeseries and saved in the same tables -func (s instrumentConstantService) CreateInstrumentConstants(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentConstantCreateBatch(ctx context.Context, tt []dto.Timeseries) ([]db.TimeseriesCreateBatchRow, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - uu := make([]model.Timeseries, len(tt)) + createTimeseriesParams := make([]db.TimeseriesCreateBatchParams, len(tt)) for idx, t := range tt { - t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return nil, err - } - if err := qtx.CreateInstrumentConstant(ctx, tsNew.InstrumentID, tsNew.ID); err != nil { - return nil, err + createTimeseriesParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &t.InstrumentID, + Name: t.Name, + ParameterID: t.ParameterID, + UnitID: t.UnitID, + Type: db.TimeseriesTypeConstant, } - uu[idx] = tsNew } - if err := tx.Commit(); 
err != nil { + uu := make([]db.TimeseriesCreateBatchRow, len(createTimeseriesParams)) + createConstantsParams := make([]db.InstrumentConstantCreateBatchParams, len(createTimeseriesParams)) + qtx.TimeseriesCreateBatch(ctx, createTimeseriesParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("instrument id must not be nil") + } + createConstantsParams[i] = db.InstrumentConstantCreateBatchParams{ + InstrumentID: *r.InstrumentID, + TimeseriesID: r.ID, + } + uu[i] = r + }) + if err != nil { return nil, err } - return uu, nil -} - -// DeleteInstrumentConstant removes a timeseries as an Instrument Constant; Does not delete underlying timeseries -func (s instrumentConstantService) DeleteInstrumentConstant(ctx context.Context, instrumentID, timeseriesID uuid.UUID) error { - tx, err := s.db.BeginTxx(ctx, nil) + qtx.InstrumentConstantCreateBatch(ctx, createConstantsParams).Exec(batchExecErr(&err)) if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - if err := qtx.DeleteInstrumentConstant(ctx, instrumentID, timeseriesID); err != nil { - return err + return nil, err } - - if err := qtx.DeleteTimeseries(ctx, timeseriesID); err != nil { - return err + if err := tx.Commit(ctx); err != nil { + return nil, err } - - return tx.Commit() + return uu, nil } diff --git a/api/internal/service/instrument_group.go b/api/internal/service/instrument_group.go index f20d0456..3048f2b5 100644 --- a/api/internal/service/instrument_group.go +++ b/api/internal/service/instrument_group.go @@ -3,52 +3,37 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentGroupService interface { - ListInstrumentGroups(ctx context.Context) ([]model.InstrumentGroup, error) 
- GetInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) (model.InstrumentGroup, error) - CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) - UpdateInstrumentGroup(ctx context.Context, group model.InstrumentGroup) (model.InstrumentGroup, error) - DeleteFlagInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) error - ListInstrumentGroupInstruments(ctx context.Context, groupID uuid.UUID) ([]model.Instrument, error) - CreateInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error - DeleteInstrumentGroupInstruments(ctx context.Context, instrumentGroupID uuid.UUID, instrumentID uuid.UUID) error -} - -type instrumentGroupService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentGroupService(db *model.Database, q *model.Queries) *instrumentGroupService { - return &instrumentGroupService{db, q} -} - -// CreateInstrumentGroup creates many instruments from an array of instruments -func (s instrumentGroupService) CreateInstrumentGroup(ctx context.Context, groups []model.InstrumentGroup) ([]model.InstrumentGroup, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - q := s.WithTx(tx) - - gg := make([]model.InstrumentGroup, len(groups)) +func (s DBService) InstrumentGroupCreateBatch(ctx context.Context, groups []dto.InstrumentGroup) ([]db.InstrumentGroupCreateBatchRow, error) { + args := make([]db.InstrumentGroupCreateBatchParams, len(groups)) for idx, g := range groups { - gNew, err := q.CreateInstrumentGroup(ctx, g) - if err != nil { - return nil, err + args[idx] = db.InstrumentGroupCreateBatchParams{ + Name: g.Name, + Description: &g.Description, + CreatedBy: g.CreatedBy, + CreatedAt: g.CreatedAt, + ProjectID: g.ProjectID, } - gg[idx] = gNew } - - if err := tx.Commit(); err != nil { + var err error + ggNew := make([]db.InstrumentGroupCreateBatchRow, 
len(groups)) + s.Queries.InstrumentGroupCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(ggNew, &err)) + if err != nil { return nil, err } + return ggNew, nil +} - return gg, nil +func (s DBService) InstrumentGroupUpdate(ctx context.Context, g dto.InstrumentGroup) (db.InstrumentGroupUpdateRow, error) { + return s.Queries.InstrumentGroupUpdate(ctx, db.InstrumentGroupUpdateParams{ + ID: g.ID, + Name: g.Name, + Description: &g.Description, + ProjectID: g.ProjectID, + UpdatedBy: g.UpdatedBy, + UpdatedAt: g.UpdatedAt, + }) } diff --git a/api/internal/service/instrument_incl.go b/api/internal/service/instrument_incl.go new file mode 100644 index 00000000..71ad5404 --- /dev/null +++ b/api/internal/service/instrument_incl.go @@ -0,0 +1,159 @@ +package service + +import ( + "context" + "errors" + "slices" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +func (s DBService) InclSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.InclSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + updateInclArgs := make([]db.InclSegmentUpdateBatchParams, len(segs)) + + for idx, seg := range segs { + updateInclArgs[idx] = db.InclSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: instrumentID, + DepthTimeseriesID: seg.DepthTimeseriesID, + A0TimeseriesID: seg.A0TimeseriesID, + A180TimeseriesID: seg.A180TimeseriesID, + B0TimeseriesID: seg.B0TimeseriesID, + B180TimeseriesID: seg.B180TimeseriesID, + } + } + qtx.InclSegmentUpdateBatch(ctx, updateInclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func createInclOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createInclSegmentBatchParams := 
make([][]db.InclSegmentCreateBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createInclOptsParams := make([]db.InclOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.InclOpts](inst.Opts) + if err != nil { + return err + } + createInclSegmentBatchParams[idx] = make([]db.InclSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createInclSegmentBatchParams[idx][i] = db.InclSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.InclParameterID, + UnitID: dto.FeetUnitID, + Type: db.TimeseriesTypeConstant, + } + createInclOptsParams[idx] = db.InclOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + inclArgs := slices.Concat(createInclSegmentBatchParams...) 
+ createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + }) + if err != nil { + return err + } + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.InclSegmentCreateBatch(ctx, inclArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createInclOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.InclOptsCreateBatch(ctx, createInclOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return err +} + +func updateInclOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateInclOptsParams := make([]db.InclOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.InclOpts](inst.Opts) + if err != nil { + return err + } + updateInclOptsParams[idx] = db.InclOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: 
opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.InclOptsUpdateBatch(ctx, updateInclOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err +} diff --git a/api/internal/service/instrument_ipi.go b/api/internal/service/instrument_ipi.go index 52ac84f6..cc0e8746 100644 --- a/api/internal/service/instrument_ipi.go +++ b/api/internal/service/instrument_ipi.go @@ -2,47 +2,187 @@ package service import ( "context" + "errors" + "fmt" + "slices" "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type IpiInstrumentService interface { - GetAllIpiSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.IpiSegment, error) - UpdateIpiSegment(ctx context.Context, seg model.IpiSegment) error - UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error - GetIpiMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.IpiMeasurements, error) -} - -type ipiInstrumentService struct { - db *model.Database - *model.Queries -} +func (s DBService) IpiSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.IpiSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -func NewIpiInstrumentService(db *model.Database, q *model.Queries) *ipiInstrumentService { - return &ipiInstrumentService{db, q} -} + updateIpiArgs := make([]db.IpiSegmentUpdateBatchParams, len(segs)) + createMmtArgs := make([]db.TimeseriesMeasurementCreateBatchParams, 0) -func (s ipiInstrumentService) UpdateIpiSegments(ctx context.Context, segs []model.IpiSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + for idx, seg := 
range segs { + updateIpiArgs[idx] = db.IpiSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: instrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + TiltTimeseriesID: seg.TiltTimeseriesID, + IncDevTimeseriesID: seg.IncDevTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, + } + if seg.Length == nil { + continue + } + createMmtArgs = append(createMmtArgs, db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.IpiSegmentUpdateBatch(ctx, updateIpiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.TimeseriesMeasurementCreateBatch(ctx, createMmtArgs).Exec(batchExecErr(&err)) if err != nil { return err } - defer model.TxDo(tx.Rollback) + return tx.Commit(ctx) +} - qtx := s.WithTx(tx) +func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createIpiSegmentBatchParams := make([][]db.IpiSegmentCreateBatchParams, len(ii)) - for _, seg := range segs { - if err := qtx.UpdateIpiSegment(ctx, seg); err != nil { + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createIpiOptsParams := make([]db.IpiOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.IpiOpts](inst.Opts) + if err != nil { return err } - if seg.Length == nil { - continue + createTimeseriesBatchParams[idx] = make([]db.TimeseriesCreateBatchParams, opts.NumSegments) + createIpiSegmentBatchParams[idx] = make([]db.IpiSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: dto.IpiParameterID, + UnitID: 
dto.FeetUnitID, + Type: db.TimeseriesTypeConstant, + } + createIpiSegmentBatchParams[idx][i] = db.IpiSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.IpiParameterID, + UnitID: dto.FeetUnitID, + Type: db.TimeseriesTypeConstant, + } + createIpiOptsParams[idx] = db.IpiOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } + + args := slices.Concat(createTimeseriesBatchParams...) + ipiArgs := slices.Concat(createIpiSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + ipiArgs[i].LengthTimeseriesID = &r.ID + }) + if err != nil { + return err + } + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.IpiSegmentCreateBatch(ctx, ipiArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createIpiOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + 
q.IpiOptsCreateBatch(ctx, createIpiOptsParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { + }) + return err +} + +func updateIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateIpiOptsParams := make([]db.IpiOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.IpiOpts](inst.Opts) + if err != nil { return err } + updateIpiOptsParams[idx] = db.IpiOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, + } + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, + } + } + var err error + q.IpiOptsUpdateBatch(ctx, updateIpiOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err } - return tx.Commit() + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/instrument_note.go b/api/internal/service/instrument_note.go index 77a462c6..c7072913 100644 --- a/api/internal/service/instrument_note.go +++ b/api/internal/service/instrument_note.go @@ -3,50 +3,48 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type InstrumentNoteService interface { - ListInstrumentNotes(ctx context.Context) ([]model.InstrumentNote, error) - 
ListInstrumentInstrumentNotes(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentNote, error) - GetInstrumentNote(ctx context.Context, noteID uuid.UUID) (model.InstrumentNote, error) - CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) - UpdateInstrumentNote(ctx context.Context, n model.InstrumentNote) (model.InstrumentNote, error) - DeleteInstrumentNote(ctx context.Context, noteID uuid.UUID) error -} - -type instrumentNoteService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentNoteService(db *model.Database, q *model.Queries) *instrumentNoteService { - return &instrumentNoteService{db, q} -} - -// CreateInstrumentNote creates many instrument notes from an array of instrument notes -func (s instrumentNoteService) CreateInstrumentNote(ctx context.Context, notes []model.InstrumentNote) ([]model.InstrumentNote, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) InstrumentNoteCreateBatch(ctx context.Context, notes []dto.InstrumentNote) ([]db.InstrumentNote, error) { + tx, err := s.db.Begin(ctx) if err != nil { return nil, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - nn := make([]model.InstrumentNote, len(notes)) + args := make([]db.InstrumentNoteCreateBatchParams, len(notes)) for idx, n := range notes { - noteNew, err := qtx.CreateInstrumentNote(ctx, n) - if err != nil { - return nil, err + args[idx] = db.InstrumentNoteCreateBatchParams{ + InstrumentID: n.InstrumentID, + Title: n.Title, + Body: n.Body, + Time: n.Time, + CreatedBy: n.CreatedBy, + CreatedAt: n.CreatedAt, } - nn[idx] = noteNew } - - if err := tx.Commit(); err != nil { + nn := make([]db.InstrumentNote, len(args)) + qtx.InstrumentNoteCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(nn, &err)) + if err != nil { + return nil, err + } + if err := tx.Commit(ctx); err != nil { return nil, err } return nn, nil } + +func (s DBService) InstrumentNoteUpdate(ctx 
context.Context, u dto.InstrumentNote) (db.InstrumentNote, error) { + return s.Queries.InstrumentNoteUpdate(ctx, db.InstrumentNoteUpdateParams{ + ID: u.ID, + Title: u.Title, + Body: u.Body, + Time: u.Time, + UpdatedBy: u.UpdatedBy, + UpdatedAt: u.UpdatedAt, + }) +} diff --git a/api/internal/service/instrument_opts.go b/api/internal/service/instrument_opts.go deleted file mode 100644 index 84eca1ef..00000000 --- a/api/internal/service/instrument_opts.go +++ /dev/null @@ -1,130 +0,0 @@ -package service - -import ( - "context" - "fmt" - "time" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -func handleOpts(ctx context.Context, q *model.Queries, inst model.Instrument, rt requestType) error { - switch inst.TypeID { - case saaTypeID: - opts, err := model.MapToStruct[model.SaaOpts](inst.Opts) - if err != nil { - return err - } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := q.CreateSaaSegment(ctx, model.SaaSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } - - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.SaaParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err 
!= nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateSaaOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if rt == update { - if err := q.UpdateSaaOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if err := q.CreateTimeseriesMeasurement(ctx, opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { - return err - } - case ipiTypeID: - opts, err := model.MapToStruct[model.IpiOpts](inst.Opts) - if err != nil { - return err - } - if rt == create { - for i := 1; i <= opts.NumSegments; i++ { - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + fmt.Sprintf("segment-%d-length", i) - tsConstant.Name = inst.Slug + fmt.Sprintf("segment-%d-length", i) - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - if err := q.CreateIpiSegment(ctx, model.IpiSegment{ID: i, InstrumentID: inst.ID, LengthTimeseriesID: tsNew.ID}); err != nil { - return err - } - } - - tsConstant := model.Timeseries{ - InstrumentID: inst.ID, - ParameterID: model.IpiParameterID, - UnitID: model.FeetUnitID, - } - tsConstant.Slug = inst.Slug + "-bottom-elevation" - tsConstant.Name = inst.Slug + "-bottom-elevation" - - tsConstant.Type = model.ConstantTimeseriesType - tsNew, err := q.CreateTimeseries(ctx, tsConstant) - if err != nil { - return err - } - if err := q.CreateInstrumentConstant(ctx, inst.ID, tsNew.ID); err != nil { - return err - } - opts.BottomElevationTimeseriesID = tsNew.ID - if err := q.CreateIpiOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if rt == update { - if err := q.UpdateIpiOpts(ctx, inst.ID, opts); err != nil { - return err - } - } - if err := q.CreateTimeseriesMeasurement(ctx, 
opts.BottomElevationTimeseriesID, time.Now(), opts.BottomElevation); err != nil { - return err - } - default: - } - return nil -} diff --git a/api/internal/service/instrument_saa.go b/api/internal/service/instrument_saa.go index 8c42eca2..9e5bbbac 100644 --- a/api/internal/service/instrument_saa.go +++ b/api/internal/service/instrument_saa.go @@ -2,47 +2,177 @@ package service import ( "context" + "errors" + "fmt" + "slices" "time" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type SaaInstrumentService interface { - GetAllSaaSegmentsForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.SaaSegment, error) - UpdateSaaSegment(ctx context.Context, seg model.SaaSegment) error - UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error - GetSaaMeasurementsForInstrument(ctx context.Context, instrumentID uuid.UUID, tw model.TimeWindow) ([]model.SaaMeasurements, error) -} +func (s DBService) SaaSegmentUpdateBatch(ctx context.Context, instrumentID uuid.UUID, segs []dto.SaaSegment) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -type saaInstrumentService struct { - db *model.Database - *model.Queries + updateSaaSegParams := make([]db.SaaSegmentUpdateBatchParams, len(segs)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, 0) + for idx, seg := range segs { + updateSaaSegParams[idx] = db.SaaSegmentUpdateBatchParams{ + ID: int32(seg.ID), + InstrumentID: instrumentID, + LengthTimeseriesID: &seg.LengthTimeseriesID, + XTimeseriesID: seg.XTimeseriesID, + YTimeseriesID: seg.YTimeseriesID, + ZTimeseriesID: seg.ZTimeseriesID, + TempTimeseriesID: seg.TempTimeseriesID, + } + if seg.Length == nil { + continue + } + createMmtParams = append(createMmtParams, db.TimeseriesMeasurementCreateBatchParams{ + 
TimeseriesID: seg.LengthTimeseriesID, + Time: time.Now(), + Value: *seg.Length, + }) + } + qtx.SaaSegmentUpdateBatch(ctx, updateSaaSegParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + qtx.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) } -func NewSaaInstrumentService(db *model.Database, q *model.Queries) *saaInstrumentService { - return &saaInstrumentService{db, q} -} +func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + createTimeseriesBatchParams := make([][]db.TimeseriesCreateBatchParams, len(ii)) + createSaaSegmentBatchParams := make([][]db.SaaSegmentCreateBatchParams, len(ii)) + + createBottomElevationTsParams := make([]db.TimeseriesCreateBatchParams, len(ii)) + createSaaOptsParams := make([]db.SaaOptsCreateBatchParams, len(ii)) + createBottomElevationMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.SaaOpts](inst.Opts) + if err != nil { + return err + } + createTimeseriesBatchParams[idx] = make([]db.TimeseriesCreateBatchParams, opts.NumSegments) + createSaaSegmentBatchParams[idx] = make([]db.SaaSegmentCreateBatchParams, opts.NumSegments) + + for i := range opts.NumSegments { + createTimeseriesBatchParams[idx][i] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + fmt.Sprintf("segment-%d-length", i+1), + ParameterID: dto.SaaParameterID, + UnitID: dto.FeetUnitID, + Type: db.TimeseriesTypeConstant, + } + createSaaSegmentBatchParams[idx][i] = db.SaaSegmentCreateBatchParams{ + ID: int32(i + 1), + InstrumentID: inst.ID, + } + } + createBottomElevationTsParams[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &inst.ID, + Name: inst.Slug + "-bottom-elevation", + ParameterID: dto.SaaParameterID, + UnitID: dto.FeetUnitID, + Type: db.TimeseriesTypeConstant, + } + createSaaOptsParams[idx] = 
db.SaaOptsCreateBatchParams{ + InstrumentID: inst.ID, + NumSegments: int32(opts.NumSegments), + InitialTime: opts.InitialTime, + } + createBottomElevationMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + Time: time.Now(), + Value: opts.BottomElevation, + } + } -func (s saaInstrumentService) UpdateSaaSegments(ctx context.Context, segs []model.SaaSegment) error { - tx, err := s.db.BeginTxx(ctx, nil) + args := slices.Concat(createTimeseriesBatchParams...) + saaArgs := slices.Concat(createSaaSegmentBatchParams...) + createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) + + var err error + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + if r.InstrumentID == nil { + err = errors.New("new timeseries must have instrument id") + return + } + createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ + TimeseriesID: r.ID, + InstrumentID: *r.InstrumentID, + } + saaArgs[i].LengthTimeseriesID = &r.ID + }) if err != nil { return err } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) + q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.SaaSegmentCreateBatch(ctx, saaArgs).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesCreateBatch(ctx, createBottomElevationTsParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + if e != nil { + err = e + return + } + createSaaOptsParams[i].BottomElevationTimeseriesID = &r.ID + createBottomElevationMmtParams[i].TimeseriesID = r.ID + }) + if err != nil { + return err + } + q.SaaOptsCreateBatch(ctx, createSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createBottomElevationMmtParams).Exec(batchExecErr(&err)) + return err +} - for _, seg := range segs { - if err := 
qtx.UpdateSaaSegment(ctx, seg); err != nil { +func updateSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) error { + updateSaaOptsParams := make([]db.SaaOptsUpdateBatchParams, len(ii)) + createMmtParams := make([]db.TimeseriesMeasurementCreateBatchParams, len(ii)) + for idx, inst := range ii { + opts, err := dto.MapToStruct[dto.SaaOpts](inst.Opts) + if err != nil { return err } - if seg.Length == nil { - continue + updateSaaOptsParams[idx] = db.SaaOptsUpdateBatchParams{ + InstrumentID: inst.ID, + BottomElevationTimeseriesID: &opts.BottomElevationTimeseriesID, + InitialTime: opts.InitialTime, } - if err := qtx.CreateTimeseriesMeasurement(ctx, seg.LengthTimeseriesID, time.Now(), *seg.Length); err != nil { - return err + createMmtParams[idx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: opts.BottomElevationTimeseriesID, + Time: time.Now(), + Value: opts.BottomElevation, } } - return tx.Commit() + var err error + q.SaaOptsUpdateBatch(ctx, updateSaaOptsParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesMeasurementCreateBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/instrument_status.go b/api/internal/service/instrument_status.go index b41a8f6f..b726b537 100644 --- a/api/internal/service/instrument_status.go +++ b/api/internal/service/instrument_status.go @@ -3,40 +3,21 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type InstrumentStatusService interface { - ListInstrumentStatus(ctx context.Context, instrumentID uuid.UUID) ([]model.InstrumentStatus, error) - GetInstrumentStatus(ctx context.Context, statusID uuid.UUID) (model.InstrumentStatus, error) - CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error - 
DeleteInstrumentStatus(ctx context.Context, statusID uuid.UUID) error -} - -type instrumentStatusService struct { - db *model.Database - *model.Queries -} - -func NewInstrumentStatusService(db *model.Database, q *model.Queries) *instrumentStatusService { - return &instrumentStatusService{db, q} -} - -func (s instrumentStatusService) CreateOrUpdateInstrumentStatus(ctx context.Context, instrumentID uuid.UUID, ss []model.InstrumentStatus) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - for _, updateStatus := range ss { - if err := qtx.CreateOrUpdateInstrumentStatus(ctx, instrumentID, updateStatus.StatusID, updateStatus.Time); err != nil { - return err +func (s DBService) InstrumentStatusCreateOrUpdateBatch(ctx context.Context, instrumentID uuid.UUID, ss []dto.InstrumentStatus) error { + args := make([]db.InstrumentStatusCreateOrUpdateBatchParams, len(ss)) + for idx, st := range ss { + args[idx] = db.InstrumentStatusCreateOrUpdateBatchParams{ + InstrumentID: instrumentID, + StatusID: st.StatusID, + Time: st.Time, } } - - return tx.Commit() + var err error + s.Queries.InstrumentStatusCreateOrUpdateBatch(ctx, args).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/measurement.go b/api/internal/service/measurement.go index 909185a0..0d0ba302 100644 --- a/api/internal/service/measurement.go +++ b/api/internal/service/measurement.go @@ -2,124 +2,237 @@ package service import ( "context" - "time" + "math" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/util" ) -type MeasurementService interface { - ListTimeseriesMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow, threshold int) (*model.MeasurementCollection, error) - 
DeleteTimeserieMeasurements(ctx context.Context, timeseriesID uuid.UUID, t time.Time) error - GetTimeseriesConstantMeasurement(ctx context.Context, timeseriesID uuid.UUID, constantName string) (model.Measurement, error) - CreateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateOrUpdateTimeseriesMeasurement(ctx context.Context, timeseriesID uuid.UUID, t time.Time, value float64) error - CreateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateOrUpdateTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, t time.Time, n model.TimeseriesNote) error - CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) - UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) - DeleteTimeseriesMeasurementsByRange(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error - DeleteTimeseriesNote(ctx context.Context, timeseriesID uuid.UUID, start, end time.Time) error -} - -type measurementService struct { - db *model.Database - *model.Queries -} - -func NewMeasurementService(db *model.Database, q *model.Queries) *measurementService { - return &measurementService{db, q} +func (s DBService) TimeseriesMeasurementListForRange(ctx context.Context, arg db.TimeseriesMeasurementCollectionGetForRangeParams, threshold int) (db.MeasurementCollection, error) { + mc, err := s.Queries.TimeseriesMeasurementCollectionGetForRange(ctx, arg) + if err != nil { + return mc, err + } + return mc, nil } -type mmtCbk func(context.Context, uuid.UUID, time.Time, float64) error -type noteCbk func(context.Context, uuid.UUID, time.Time, model.TimeseriesNote) error +func (s DBService) 
CreateTimeseriesMeasurements(ctx context.Context, mc []dto.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) -func createMeasurements(ctx context.Context, mc []model.MeasurementCollection, mmtFn mmtCbk, noteFn noteCbk) error { - for _, c := range mc { - for _, m := range c.Items { - if err := mmtFn(ctx, c.TimeseriesID, m.Time, float64(m.Value)); err != nil { - return err + chunkSize := 1_000 + mm := make([]db.TimeseriesMeasurementCreateBatchParams, chunkSize) + nn := make([]db.TimeseriesNoteCreateBatchParams, chunkSize) + var mIdx, nIdx int + + for idx := range mc { + for _, m := range mc[idx].Items { + v := float64(m.Value) + mm[mIdx] = db.TimeseriesMeasurementCreateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: v, } - if m.Masked != nil || m.Validated != nil || m.Annotation != nil { - if err := noteFn(ctx, c.TimeseriesID, m.Time, m.TimeseriesNote); err != nil { + mIdx++ + if mIdx == chunkSize { + qtx.TimeseriesMeasurementCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { return err } + mIdx = 0 + } + if math.IsNaN(v) || math.IsInf(v, 0) { + masked := true + m.Masked = &masked + } + if m.Masked != nil || m.Validated != nil || m.Annotation != nil { + nn[nIdx] = db.TimeseriesNoteCreateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + } + nIdx++ + if nIdx == chunkSize { + qtx.TimeseriesNoteCreateBatch(ctx, nn).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } } } } - return nil + if mIdx != 0 { + qtx.TimeseriesMeasurementCreateBatch(ctx, mm[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if nIdx != 0 { + qtx.TimeseriesNoteCreateBatch(ctx, nn[:nIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return tx.Commit(ctx) } -// CreateTimeseriesMeasurements creates many 
timeseries from an array of timeseries -func (s measurementService) CreateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +// TimeseriesMeasurementCreateOrUpdateBatch creates many timeseries measurements from an array of measurement collections +// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated +func (s DBService) TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, mc []dto.MeasurementCollection) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - return nil, err + if err := createOrUpdateTimeseriesMeasurementsBatch(ctx, qtx, mc); err != nil { + return err } - if err := tx.Commit(); err != nil { - return nil, err - } - - return mc, nil + return tx.Commit(ctx) } -// CreateOrUpdateTimeseriesMeasurements creates many timeseries from an array of timeseries -// If a timeseries measurement already exists for a given timeseries_id and time, the value is updated -func (s measurementService) CreateOrUpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementUpdateBatch(ctx context.Context, mc []dto.MeasurementCollection, tw *util.TimeWindow) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := createMeasurements(ctx, mc, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { - return nil, err + if tw != nil { + if err := deleteTimeseriesMeasurementsRange(ctx, qtx, mc, *tw); err != nil { + return err + } 
} - - if err := tx.Commit(); err != nil { - return nil, err + if err := createOrUpdateTimeseriesMeasurementsBatch(ctx, qtx, mc); err != nil { + return err } - - return mc, nil + return tx.Commit(ctx) } -// UpdateTimeseriesMeasurements updates many timeseries measurements, "overwriting" time and values to match paylaod -func (s measurementService) UpdateTimeseriesMeasurements(ctx context.Context, mc []model.MeasurementCollection, tw model.TimeWindow) ([]model.MeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesMeasurementDeleteRange(ctx context.Context, arg db.TimeseriesMeasurementDeleteRangeParams) error { + tx, err := s.db.Begin(ctx) if err != nil { - return nil, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, c := range mc { - if err := qtx.DeleteTimeseriesMeasurementsByRange(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + if err := qtx.TimeseriesMeasurementDeleteRange(ctx, arg); err != nil { + return err + } + if err := qtx.TimeseriesNoteDeleteRange(ctx, db.TimeseriesNoteDeleteRangeParams(arg)); err != nil { + return err + } + return tx.Commit(ctx) +} + +func createOrUpdateTimeseriesMeasurementsBatch(ctx context.Context, q *db.Queries, mc []dto.MeasurementCollection) error { + chunkSize := 1_000 + mm := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, chunkSize) + nn := make([]db.TimeseriesNoteCreateOrUpdateBatchParams, chunkSize) + var mIdx, nIdx int + + var err error + for idx := range mc { + for _, m := range mc[idx].Items { + mm[mIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Value: float64(m.Value), + } + mIdx++ + if mIdx == chunkSize { + q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mIdx = 0 + } + if m.Masked != nil || m.Validated != nil || m.Annotation != nil { + nn[nIdx] = 
db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: mc[idx].TimeseriesID, + Time: m.Time, + Masked: m.Masked, + Validated: m.Validated, + Annotation: m.Annotation, + } + nIdx++ + if nIdx == chunkSize { + q.TimeseriesNoteCreateOrUpdateBatch(ctx, nn).Exec(batchExecErr(&err)) + if err != nil { + return err + } + nIdx = 0 + } + } } - if err := qtx.DeleteTimeseriesNote(ctx, c.TimeseriesID, tw.After, tw.Before); err != nil { - return nil, err + } + if mIdx != 0 { + q.TimeseriesMeasurementCreateOrUpdateBatch(ctx, mm[:mIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err } } - - if err := createMeasurements(ctx, mc, qtx.CreateTimeseriesMeasurement, qtx.CreateTimeseriesNote); err != nil { - return nil, err + if nIdx != 0 { + q.TimeseriesNoteCreateOrUpdateBatch(ctx, nn[:nIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } } + return nil +} - if err := tx.Commit(); err != nil { - return nil, err - } +func deleteTimeseriesMeasurementsRange(ctx context.Context, q *db.Queries, mc []dto.MeasurementCollection, tw util.TimeWindow) error { + chunkSize := 1_000 + delMmParams := make([]db.TimeseriesMeasurementDeleteRangeBatchParams, chunkSize) + delNnParams := make([]db.TimeseriesNoteDeleteRangeBatchParams, chunkSize) + var dIdx int - return mc, nil + var err error + for _, c := range mc { + delMmParams[dIdx] = db.TimeseriesMeasurementDeleteRangeBatchParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + } + delNnParams[dIdx] = db.TimeseriesNoteDeleteRangeBatchParams{ + TimeseriesID: c.TimeseriesID, + After: tw.After, + Before: tw.Before, + } + dIdx++ + if dIdx == chunkSize { + q.TimeseriesMeasurementDeleteRangeBatch(ctx, delMmParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesNoteDeleteRangeBatch(ctx, delNnParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + dIdx = 0 + } + } + if dIdx != 0 { + q.TimeseriesMeasurementDeleteRangeBatch(ctx, 
delMmParams[:dIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + q.TimeseriesNoteDeleteRangeBatch(ctx, delNnParams[:dIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return nil } diff --git a/api/internal/service/measurement_inclinometer.go b/api/internal/service/measurement_inclinometer.go deleted file mode 100644 index 8805a046..00000000 --- a/api/internal/service/measurement_inclinometer.go +++ /dev/null @@ -1,120 +0,0 @@ -package service - -import ( - "context" - "time" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type InclinometerMeasurementService interface { - ListInclinometerMeasurements(ctx context.Context, timeseriesID uuid.UUID, tw model.TimeWindow) (*model.InclinometerMeasurementCollection, error) - ListInclinometerMeasurementValues(ctx context.Context, timeseriesID uuid.UUID, time time.Time, inclConstant float64) ([]*model.InclinometerMeasurementValues, error) - DeleteInclinometerMeasurement(ctx context.Context, timeseriesID uuid.UUID, time time.Time) error - CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) - ListInstrumentIDsFromTimeseriesID(ctx context.Context, timeseriesID uuid.UUID) ([]uuid.UUID, error) - CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error -} - -type inclinometerMeasurementService struct { - db *model.Database - *model.Queries -} - -func NewInclinometerMeasurementService(db *model.Database, q *model.Queries) *inclinometerMeasurementService { - return &inclinometerMeasurementService{db, q} -} - -// CreateInclinometerMeasurements creates many inclinometer from an array of inclinometer -// If a inclinometer measurement already exists for a given timeseries_id and time, the values is updated -func (s 
inclinometerMeasurementService) CreateOrUpdateInclinometerMeasurements(ctx context.Context, im []model.InclinometerMeasurementCollection, p model.Profile, createDate time.Time) ([]model.InclinometerMeasurementCollection, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - // Iterate All inclinometer Measurements - for idx := range im { - for i := range im[idx].Inclinometers { - im[idx].Inclinometers[i].Creator = p.ID - im[idx].Inclinometers[i].CreateDate = createDate - if err := qtx.CreateOrUpdateInclinometerMeasurement(ctx, im[idx].TimeseriesID, im[idx].Inclinometers[i].Time, im[idx].Inclinometers[i].Values, p.ID, createDate); err != nil { - return nil, err - } - } - } - if err := tx.Commit(); err != nil { - return nil, err - } - - return im, nil -} - -// CreateTimeseriesConstant creates timeseries constant -func (s inclinometerMeasurementService) CreateTimeseriesConstant(ctx context.Context, timeseriesID uuid.UUID, parameterName string, unitName string, value float64) error { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - instrumentIDs, err := qtx.ListInstrumentIDsFromTimeseriesID(ctx, timeseriesID) - if err != nil { - return err - } - - parameterIDs, err := qtx.ListParameterIDsFromParameterName(ctx, parameterName) - if err != nil { - return err - } - - unitIDs, err := qtx.ListUnitIDsFromUnitName(ctx, unitName) - if err != nil { - return err - } - - if len(instrumentIDs) > 0 && len(parameterIDs) > 0 && len(unitIDs) > 0 { - t := model.Timeseries{} - measurement := model.Measurement{} - measurements := []model.Measurement{} - mc := model.MeasurementCollection{} - mcs := []model.MeasurementCollection{} - ts := []model.Timeseries{} - - t.InstrumentID = instrumentIDs[0] - t.Slug = parameterName - t.Name = parameterName - t.ParameterID = parameterIDs[0] - t.UnitID = unitIDs[0] - ts = 
append(ts, t) - - t.Type = model.ConstantTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, t) - if err != nil { - return err - } - // Assign timeseries - if err := qtx.CreateInstrumentConstant(ctx, t.InstrumentID, t.ID); err != nil { - return err - } - - measurement.Time = time.Now() - measurement.Value = model.FloatNanInf(value) - measurements = append(measurements, measurement) - mc.TimeseriesID = tsNew.ID - mc.Items = measurements - mcs = append(mcs, mc) - - if err = createMeasurements(ctx, mcs, qtx.CreateOrUpdateTimeseriesMeasurement, qtx.CreateOrUpdateTimeseriesNote); err != nil { - return err - } - } - - return nil -} diff --git a/api/internal/service/opendcs.go b/api/internal/service/opendcs.go deleted file mode 100644 index 00af0f17..00000000 --- a/api/internal/service/opendcs.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type OpendcsService interface { - ListOpendcsSites(ctx context.Context) ([]model.Site, error) -} - -type opendcsService struct { - db *model.Database - *model.Queries -} - -func NewOpendcsService(db *model.Database, q *model.Queries) *opendcsService { - return &opendcsService{db, q} -} diff --git a/api/internal/service/plot_config.go b/api/internal/service/plot_config.go index 7d1f0bc0..93607c6c 100644 --- a/api/internal/service/plot_config.go +++ b/api/internal/service/plot_config.go @@ -3,25 +3,54 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type PlotConfigService interface { - ListPlotConfigs(ctx context.Context, projectID uuid.UUID) ([]model.PlotConfig, error) - GetPlotConfig(ctx context.Context, plotconfigID uuid.UUID) (model.PlotConfig, error) - DeletePlotConfig(ctx context.Context, projectID, plotConfigID uuid.UUID) error - 
plotConfigBullseyePlotService - plotConfigContourPlotService - plotConfigProfilePlotService - plotConfigScatterLinePlotService +func createPlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) (uuid.UUID, error) { + pcID, err := q.PlotConfigCreate(ctx, db.PlotConfigCreateParams{ + Name: pc.Name, + ProjectID: pc.ProjectID, + CreatedBy: pc.CreatedBy, + CreatedAt: pc.CreatedAt, + PlotType: db.PlotType(pc.PlotType), + }) + if err != nil { + return pcID, err + } + err = q.PlotConfigSettingsCreate(ctx, db.PlotConfigSettingsCreateParams{ + ID: pcID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) + return pcID, err } -type plotConfigService struct { - db *model.Database - *model.Queries -} - -func NewPlotConfigService(db *model.Database, q *model.Queries) *plotConfigService { - return &plotConfigService{db, q} +func updatePlotConfigCommon(ctx context.Context, q *db.Queries, pc dto.PlotConfig) error { + if err := q.PlotConfigUpdate(ctx, db.PlotConfigUpdateParams{ + ProjectID: pc.ProjectID, + ID: pc.ID, + Name: pc.Name, + UpdatedBy: pc.UpdatedBy, + UpdatedAt: pc.UpdatedAt, + }); err != nil { + return err + } + if err := q.PlotConfigSettingsDelete(ctx, pc.ID); err != nil { + return err + } + return q.PlotConfigSettingsCreate(ctx, db.PlotConfigSettingsCreateParams{ + ID: pc.ID, + ShowMasked: pc.ShowMasked, + ShowNonvalidated: pc.ShowNonValidated, + ShowComments: pc.ShowComments, + AutoRange: pc.AutoRange, + DateRange: pc.DateRange, + Threshold: int32(pc.Threshold), + }) } diff --git a/api/internal/service/plot_config_bullseye.go b/api/internal/service/plot_config_bullseye.go index 2fea5a11..3ef94130 100644 --- a/api/internal/service/plot_config_bullseye.go +++ b/api/internal/service/plot_config_bullseye.go @@ -3,79 +3,63 @@ package service import ( "context" - 
"github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type plotConfigBullseyePlotService interface { - CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) - UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) - ListPlotConfigMeasurementsBullseyePlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]model.PlotConfigMeasurementBullseyePlot, error) -} - -func (s plotConfigService) CreatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateBullseye(ctx context.Context, pc dto.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.BullseyePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pc.PlotType = dto.BullseyePlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotBullseyeConfigCreate(ctx, db.PlotBullseyeConfigCreateParams{ + PlotConfigID: pcID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - if err := qtx.CreatePlotBullseyeConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + a, err = qtx.PlotConfigGet(ctx, pcID) if err != nil 
{ - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() - - return pcNew, err + err = tx.Commit(ctx) + return a, err } -func (s plotConfigService) UpdatePlotConfigBullseyePlot(ctx context.Context, pc model.PlotConfigBullseyePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateBullseye(ctx context.Context, pc dto.PlotConfigBullseyePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.UpdatePlotBullseyeConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotBullseyeConfigUpdate(ctx, db.PlotBullseyeConfigUpdateParams{ + PlotConfigID: pc.ID, + XAxisTimeseriesID: &pc.Display.XAxisTimeseriesID, + YAxisTimeseriesID: &pc.Display.YAxisTimeseriesID, + }); err != nil { + return a, err } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } diff --git a/api/internal/service/plot_config_contour.go b/api/internal/service/plot_config_contour.go index d73026d5..355ec374 100644 --- a/api/internal/service/plot_config_contour.go +++ b/api/internal/service/plot_config_contour.go @@ -2,122 +2,129 @@ package service import ( "context" + "errors" "time" - 
"github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type plotConfigContourPlotService interface { - CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) - UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) - ListPlotConfigTimesContourPlot(ctx context.Context, plotConfigID uuid.UUID, tw model.TimeWindow) ([]time.Time, error) - GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) +type AggregatePlotConfigMeasurementsContourPlot struct { + X []float64 `json:"x"` + Y []float64 `json:"y"` + Z []*float64 `json:"z"` } -func (s plotConfigService) CreatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateContour(ctx context.Context, pc dto.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ContourPlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) - if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotContourConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pcID, tsID); err != nil { - return model.PlotConfig{}, err - } - } - - pcNew, err := qtx.GetPlotConfig(ctx, 
pcID) + pc.PlotType = dto.ContourPlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) + if err := qtx.PlotContourConfigCreate(ctx, db.PlotContourConfigCreateParams{ + PlotConfigID: pcID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: pc.Display.ShowLabels, + }); err != nil { + return a, err + } + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pcID, pc.Display.TimeseriesIDs); err != nil { + return a, err + } + a, err = qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } + err = tx.Commit(ctx) - err = tx.Commit() - - return pcNew, err + return a, err } -func (s plotConfigService) UpdatePlotConfigContourPlot(ctx context.Context, pc model.PlotConfigContourPlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateContour(ctx context.Context, pc dto.PlotConfigContourPlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.UpdatePlotContourConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotContourConfigUpdate(ctx, db.PlotContourConfigUpdateParams{ + PlotConfigID: pc.ID, + Time: pc.Display.Time, + LocfBackfill: pc.Display.LocfBackfill, + GradientSmoothing: pc.Display.GradientSmoothing, + ContourSmoothing: pc.Display.ContourSmoothing, + ShowLabels: pc.Display.ShowLabels, + }); err != nil { + return a, err } - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return 
model.PlotConfig{}, err + if err := qtx.PlotContourConfigTimeseriesDeleteForPlotContourConfig(ctx, pc.ID); err != nil { + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeleteAllPlotContourConfigTimeseries(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := createPlotContourConfigTimeseriesBatch(ctx, qtx, pc.ID, pc.Display.TimeseriesIDs); err != nil { + return a, err } - for _, tsID := range pc.Display.TimeseriesIDs { - if err := qtx.CreatePlotContourConfigTimeseries(ctx, pc.ID, tsID); err != nil { - return model.PlotConfig{}, err - } - } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) GetPlotConfigMeasurementsContourPlot(ctx context.Context, plotConfigID uuid.UUID, t time.Time) (model.AggregatePlotConfigMeasurementsContourPlot, error) { - q := s.db.Queries() - - mm, err := q.ListPlotConfigMeasurementsContourPlot(ctx, plotConfigID, t) +func (s DBService) PlotConfigMeasurementListContour(ctx context.Context, pcID uuid.UUID, t time.Time) (AggregatePlotConfigMeasurementsContourPlot, error) { + var a AggregatePlotConfigMeasurementsContourPlot + mm, err := s.Queries.PlotConfigMeasurementListContour(ctx, db.PlotConfigMeasurementListContourParams{ + PlotConfigID: pcID, + Time: t, + }) if err != nil { - return model.AggregatePlotConfigMeasurementsContourPlot{}, err + return a, err } - - am := model.AggregatePlotConfigMeasurementsContourPlot{ + a = AggregatePlotConfigMeasurementsContourPlot{ X: make([]float64, len(mm)), Y: make([]float64, len(mm)), Z: make([]*float64, len(mm)), } - for idx := range mm { - am.X[idx] = mm[idx].X - am.Y[idx] = mm[idx].Y - am.Z[idx] = mm[idx].Z + z, ok := mm[idx].Z.(*float64) + if !ok { + return a, 
errors.New("failed type assertion: interface to float64") + } + a.X[idx] = mm[idx].X + a.Y[idx] = mm[idx].Y + a.Z[idx] = z } + return a, nil +} - return am, nil +func createPlotContourConfigTimeseriesBatch(ctx context.Context, q *db.Queries, pcID uuid.UUID, tt []uuid.UUID) error { + args := make([]db.PlotContourConfigTimeseriesCreateBatchParams, len(tt)) + for idx, tsID := range tt { + args[idx] = db.PlotContourConfigTimeseriesCreateBatchParams{ + PlotContourConfigID: pcID, + TimeseriesID: tsID, + } + } + var err error + q.PlotContourConfigTimeseriesCreateBatch(ctx, args).Exec(batchExecErr(&err)) + return err } diff --git a/api/internal/service/plot_config_profile.go b/api/internal/service/plot_config_profile.go index 2bc9920c..d9e85c8a 100644 --- a/api/internal/service/plot_config_profile.go +++ b/api/internal/service/plot_config_profile.go @@ -3,78 +3,62 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" ) -type plotConfigProfilePlotService interface { - CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) - UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) -} - -func (s plotConfigService) CreatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateProfile(ctx context.Context, pc dto.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ProfilePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) + pc.PlotType = dto.ProfilePlotType + 
pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotProfileConfigCreate(ctx, db.PlotProfileConfigCreateParams{ + PlotConfigID: pcID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, err } - - if err := qtx.CreatePlotProfileConfig(ctx, pcID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + pcNew, err := qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigProfilePlot(ctx context.Context, pc model.PlotConfigProfilePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateProfile(ctx context.Context, pc dto.PlotConfigProfilePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.UpdatePlotProfileConfig(ctx, pc.ID, pc.Display); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotProfileConfigUpdate(ctx, db.PlotProfileConfigUpdateParams{ + PlotConfigID: pc.ID, + InstrumentID: pc.Display.InstrumentID, + }); err != nil { + return a, 
err } - - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + a, err = qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } + err = tx.Commit(ctx) - err = tx.Commit() - - return pcNew, err + return a, err } diff --git a/api/internal/service/plot_config_scatter_line.go b/api/internal/service/plot_config_scatter_line.go index 836e888f..4e790201 100644 --- a/api/internal/service/plot_config_scatter_line.go +++ b/api/internal/service/plot_config_scatter_line.go @@ -3,115 +3,92 @@ package service import ( "context" "fmt" - "log" "strings" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type plotConfigScatterLinePlotService interface { - CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) - UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) -} - -func (s plotConfigService) CreatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigCreateScatterLine(ctx context.Context, pc dto.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pc.PlotType = model.ScatterLinePlotType - pcID, err := qtx.CreatePlotConfig(ctx, pc.PlotConfig) - if err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.CreatePlotConfigSettings(ctx, pcID, pc.PlotConfigSettings); err != nil { - return model.PlotConfig{}, err - } - + pc.PlotType = dto.ScatterLinePlotType + pcID, err := createPlotConfigCommon(ctx, qtx, pc.PlotConfig) if err := 
validateCreateTraces(ctx, qtx, pcID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.CreatePlotConfigScatterLineLayout(ctx, pcID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotConfigScatterLineLayoutCreate(ctx, db.PlotConfigScatterLineLayoutCreateParams{ + PlotConfigID: pcID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pcID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } - pcNew, err := qtx.GetPlotConfig(ctx, pcID) + pcNew, err := qtx.PlotConfigGet(ctx, pcID) if err != nil { - return model.PlotConfig{}, err + return a, err } - - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func (s plotConfigService) UpdatePlotConfigScatterLinePlot(ctx context.Context, pc model.PlotConfigScatterLinePlot) (model.PlotConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) PlotConfigUpdateScatterLine(ctx context.Context, pc dto.PlotConfigScatterLinePlot) (db.VPlotConfiguration, error) { + var a db.VPlotConfiguration + tx, err := s.db.Begin(ctx) if err != nil { - return model.PlotConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdatePlotConfig(ctx, pc.PlotConfig); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeletePlotConfigSettings(ctx, pc.ID); err != nil { - log.Printf("fails on delete %s", pc.ID) - return model.PlotConfig{}, err + if err := updatePlotConfigCommon(ctx, qtx, pc.PlotConfig); err != nil { + return a, err } - - if err := qtx.DeleteAllPlotConfigTimeseriesTraces(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err - } - - if err := qtx.DeleteAllPlotConfigCustomShapes(ctx, pc.ID); err != nil { - return model.PlotConfig{}, err + if err := 
qtx.PlotConfigTimeseriesTraceDeleteForPlotConfig(ctx, &pc.ID); err != nil { + return a, err } - - if err := qtx.CreatePlotConfigSettings(ctx, pc.ID, pc.PlotConfigSettings); err != nil { - log.Printf("fails on create %s, %+v", pc.ID, pc.PlotConfigSettings) - return model.PlotConfig{}, err + if err := qtx.PlotConfigCustomShapeDeleteForPlotConfig(ctx, &pc.ID); err != nil { + return a, err } - if err := validateCreateTraces(ctx, qtx, pc.ID, pc.Display.Traces); err != nil { - return model.PlotConfig{}, err + return a, err } - - if err := qtx.UpdatePlotConfigScatterLineLayout(ctx, pc.ID, pc.Display.Layout); err != nil { - return model.PlotConfig{}, err + if err := qtx.PlotConfigScatterLineLayoutUpdate(ctx, db.PlotConfigScatterLineLayoutUpdateParams{ + PlotConfigID: pc.ID, + YAxisTitle: pc.Display.Layout.YAxisTitle, + Y2AxisTitle: pc.Display.Layout.Y2AxisTitle, + }); err != nil { + return a, err } - if err := validateCreateCustomShapes(ctx, qtx, pc.ID, pc.Display.Layout.CustomShapes); err != nil { - return model.PlotConfig{}, err + return a, err } - pcNew, err := qtx.GetPlotConfig(ctx, pc.ID) + pcNew, err := qtx.PlotConfigGet(ctx, pc.ID) if err != nil { - return model.PlotConfig{}, err + return a, err } - err = tx.Commit() + err = tx.Commit(ctx) return pcNew, err } -func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, trs []model.PlotConfigScatterLineTimeseriesTrace) error { - for _, tr := range trs { +func validateCreateTraces(ctx context.Context, q *db.Queries, pcID uuid.UUID, trs []dto.PlotConfigScatterLineTimeseriesTrace) error { + args := make([]db.PlotConfigTimeseriesTracesCreateBatchParams, len(trs)) + for idx, tr := range trs { tr.PlotConfigurationID = pcID - if err := validateColor(tr.Color); err != nil { return err } @@ -121,23 +98,37 @@ func validateCreateTraces(ctx context.Context, q *model.Queries, pcID uuid.UUID, if tr.YAxis == "" { tr.YAxis = "y1" } - - if err := q.CreatePlotConfigTimeseriesTrace(ctx, tr); err != nil { - return 
err + args[idx] = db.PlotConfigTimeseriesTracesCreateBatchParams{ + PlotConfigurationID: &tr.PlotConfigurationID, + TimeseriesID: &tr.TimeseriesID, + TraceOrder: int32(tr.TraceOrder), + Color: tr.Color, + LineStyle: db.LineStyle(tr.LineStyle), + Width: tr.Width, + ShowMarkers: tr.ShowMarkers, + YAxis: db.YAxis(tr.YAxis), } } + var err error + q.PlotConfigTimeseriesTracesCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } return nil } -func validateCreateCustomShapes(ctx context.Context, q *model.Queries, pcID uuid.UUID, css []model.PlotConfigScatterLineCustomShape) error { +func validateCreateCustomShapes(ctx context.Context, q *db.Queries, pcID uuid.UUID, css []dto.PlotConfigScatterLineCustomShape) error { for _, cs := range css { - cs.PlotConfigurationID = pcID - if err := validateColor(cs.Color); err != nil { return err } - - if err := q.CreatePlotConfigCustomShape(ctx, cs); err != nil { + if err := q.PlotConfigCustomShapeCreate(ctx, db.PlotConfigCustomShapeCreateParams{ + PlotConfigurationID: &pcID, + Enabled: cs.Enabled, + Name: cs.Name, + DataPoint: cs.DataPoint, + Color: cs.Color, + }); err != nil { return err } } diff --git a/api/internal/service/profile.go b/api/internal/service/profile.go index f1d9f833..0e0c613f 100644 --- a/api/internal/service/profile.go +++ b/api/internal/service/profile.go @@ -5,102 +5,32 @@ import ( "errors" "strings" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/password" "github.com/google/uuid" ) -type ProfileService interface { - GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) - GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) - GetProfileForEmail(ctx context.Context, email string) (model.Profile, error) - 
GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) - GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) - CreateProfile(ctx context.Context, n model.ProfileInfo) (model.Profile, error) - CreateProfileToken(ctx context.Context, profileID uuid.UUID) (model.Token, error) - GetTokenInfoByTokenID(ctx context.Context, tokenID string) (model.TokenInfo, error) - UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) - DeleteToken(ctx context.Context, profileID uuid.UUID, tokenID string) error +type Token struct { + db.ProfileToken + SecretToken string `json:"secret_token"` } -type profileService struct { - db *model.Database - *model.Queries -} - -func NewProfileService(db *model.Database, q *model.Queries) *profileService { - return &profileService{db, q} -} - -func (s profileService) GetProfileWithTokensForClaims(ctx context.Context, claims model.ProfileClaims) (model.Profile, error) { - var p model.Profile +func (s DBService) ProfileGetWithTokensForClaims(ctx context.Context, claims dto.ProfileClaims) (db.VProfile, error) { + var a db.VProfile var err error if claims.CacUID != nil { - p, err = s.GetProfileWithTokensForEDIPI(ctx, *claims.CacUID) + a, err = s.Queries.ProfileGetForEDIPI(ctx, int64(*claims.CacUID)) } else { - p, err = s.GetProfileWithTokensForEmail(ctx, claims.Email) - } - if err != nil { - return model.Profile{}, err - } - return p, nil -} - -func (s profileService) GetProfileWithTokensForEDIPI(ctx context.Context, edipi int) (model.Profile, error) { - p, err := s.GetProfileForEDIPI(ctx, edipi) - if err != nil { - return model.Profile{}, err + a, err = s.ProfileGetForEmail(ctx, claims.Email) } - tokens, err := s.GetIssuedTokens(ctx, p.ID) if err != nil { - return model.Profile{}, err + return a, err } - p.Tokens = tokens - return p, nil + return a, nil } -func (s profileService) GetProfileWithTokensForEmail(ctx 
context.Context, email string) (model.Profile, error) { - p, err := s.GetProfileForEmail(ctx, email) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -func (s profileService) GetProfileWithTokensForUsername(ctx context.Context, username string) (model.Profile, error) { - p, err := s.GetProfileForUsername(ctx, username) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -// GetProfileForTokenID returns a profile given a token ID -func (s profileService) GetProfileWithTokensForTokenID(ctx context.Context, tokenID string) (model.Profile, error) { - p, err := s.GetProfileForTokenID(ctx, tokenID) - if err != nil { - return model.Profile{}, err - } - tokens, err := s.GetIssuedTokens(ctx, p.ID) - if err != nil { - return model.Profile{}, err - } - p.Tokens = tokens - return p, nil -} - -// UpdateProfileForClaims syncs a database profile to the provided token claims -// THe order of precence in which the function will attepmt to update profiles is edipi, email, username -func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Profile, claims model.ProfileClaims) (model.Profile, error) { +func (s DBService) ProfileUpdateForClaims(ctx context.Context, p db.VProfile, claims dto.ProfileClaims) (db.VProfile, error) { var claimsMatchProfile bool = p.Username == claims.PreferredUsername && strings.ToLower(p.Email) == strings.ToLower(claims.Email) && p.DisplayName == claims.Name @@ -108,12 +38,12 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof if claimsMatchProfile { return p, nil } - if claims.CacUID != nil && !claimsMatchProfile { - if err := s.UpdateProfileForEDIPI(ctx, *claims.CacUID, model.ProfileInfo{ + if err := 
s.Queries.ProfileUpdateForEDIPI(ctx, db.ProfileUpdateForEDIPIParams{ Username: claims.PreferredUsername, - DisplayName: claims.Name, Email: claims.Email, + DisplayName: claims.Name, + Edipi: int64(*claims.CacUID), }); err != nil { return p, err } @@ -123,11 +53,11 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof return p, nil } - if strings.ToLower(p.Email) == strings.ToLower(claims.Email) && !claimsMatchProfile { - if err := s.UpdateProfileForEmail(ctx, claims.Email, model.ProfileInfo{ + if err := s.Queries.ProfileUpdateForEmail(ctx, db.ProfileUpdateForEmailParams{ Username: claims.PreferredUsername, DisplayName: claims.Name, + Email: claims.Email, }); err != nil { return p, err } @@ -139,3 +69,19 @@ func (s profileService) UpdateProfileForClaims(ctx context.Context, p model.Prof return p, errors.New("claims did not match profile and could not be updated") } + +func (s DBService) ProfileTokenCreate(ctx context.Context, profileID uuid.UUID) (Token, error) { + var t Token + secretToken := password.GenerateRandom(40) + hash, err := password.CreateHash(secretToken, password.DefaultParams) + if err != nil { + return t, err + } + s.Queries.ProfileTokenCreate(ctx, db.ProfileTokenCreateParams{ + ProfileID: profileID, + TokenID: password.GenerateRandom(40), + Hash: hash, + }) + t.SecretToken = secretToken + return t, nil +} diff --git a/api/internal/service/project.go b/api/internal/service/project.go index 90581625..b3cd2831 100644 --- a/api/internal/service/project.go +++ b/api/internal/service/project.go @@ -4,129 +4,121 @@ import ( "context" "image" "io" + "log" "mime/multipart" "os" + "github.com/USACE/instrumentation-api/api/internal/cloud" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/USACE/instrumentation-api/api/internal/img" - "github.com/USACE/instrumentation-api/api/internal/model" "github.com/google/uuid" ) -type ProjectService interface { - 
SearchProjects(ctx context.Context, searchInput string, limit int) ([]model.SearchResult, error) - ListDistricts(ctx context.Context) ([]model.District, error) - ListProjects(ctx context.Context) ([]model.Project, error) - ListProjectsByFederalID(ctx context.Context, federalID string) ([]model.Project, error) - ListProjectsForProfile(ctx context.Context, profileID uuid.UUID) ([]model.Project, error) - ListProjectsForProfileRole(ctx context.Context, profileID uuid.UUID, role string) ([]model.Project, error) - ListProjectInstruments(ctx context.Context, projectID uuid.UUID) ([]model.Instrument, error) - ListProjectInstrumentGroups(ctx context.Context, projectID uuid.UUID) ([]model.InstrumentGroup, error) - GetProjectCount(ctx context.Context) (model.ProjectCount, error) - GetProject(ctx context.Context, projectID uuid.UUID) (model.Project, error) - CreateProject(ctx context.Context, p model.Project) (model.IDSlugName, error) - CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) - UpdateProject(ctx context.Context, p model.Project) (model.Project, error) - UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error - DeleteFlagProject(ctx context.Context, projectID uuid.UUID) error -} +type imgUploader func(ctx context.Context, r io.Reader, opts ImgUploaderOpts) error -type projectService struct { - db *model.Database - *model.Queries +type ImgUploaderOpts struct { + rawPath string + bucketName string } -func NewProjectService(db *model.Database, q *model.Queries) *projectService { - return &projectService{db, q} +type ProjectCount struct { + ProjectCount int64 `json:"project_count"` } -type uploader func(ctx context.Context, r io.Reader, rawPath, bucketName string) error - -// CreateProjectBulk creates one or more projects from an array of projects -func (s projectService) CreateProjectBulk(ctx context.Context, projects []model.Project) ([]model.IDSlugName, error) { - tx, err := 
s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) +func (s DBService) ProjectGetCount(ctx context.Context) (ProjectCount, error) { + count, err := s.Queries.ProjectGetCount(ctx) + return ProjectCount{count}, err +} - pp := make([]model.IDSlugName, len(projects)) +func (s DBService) ProjectCreateBatch(ctx context.Context, projects []dto.Project) ([]db.ProjectCreateBatchRow, error) { + args := make([]db.ProjectCreateBatchParams, len(projects)) for idx, p := range projects { - aa, err := qtx.CreateProject(ctx, p) - if err != nil { - return nil, err + args[idx] = db.ProjectCreateBatchParams{ + FederalID: p.FederalID, + Name: p.Name, + DistrictID: p.DistrictID, + CreatedBy: p.CreatedBy, + CreatedAt: p.CreatedAt, } - pp[idx] = aa } - if err := tx.Commit(); err != nil { + var err error + pp := make([]db.ProjectCreateBatchRow, len(args)) + s.Queries.ProjectCreateBatch(ctx, args).QueryRow(batchQueryRowCollect(pp, &err)) + if err != nil { return nil, err } return pp, nil } -// UpdateProject updates a project -func (s projectService) UpdateProject(ctx context.Context, p model.Project) (model.Project, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ProjectUpdate(ctx context.Context, p dto.Project) (db.VProject, error) { + var a db.VProject + tx, err := s.db.Begin(ctx) if err != nil { - return model.Project{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateProject(ctx, p); err != nil { - return model.Project{}, err + if _, err := qtx.ProjectUpdate(ctx, db.ProjectUpdateParams{ + ID: p.ID, + Name: p.Name, + UpdatedBy: p.UpdatedBy, + UpdatedAt: p.UpdatedAt, + DistrictID: p.DistrictID, + FederalID: p.FederalID, + }); err != nil { + return a, err } - - updated, err := qtx.GetProject(ctx, p.ID) + updated, err := qtx.ProjectGet(ctx, p.ID) if err != nil { - return model.Project{}, err + return a, err } - - if err := 
tx.Commit(); err != nil { - return model.Project{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - return updated, nil } -func (s projectService) UploadProjectImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, u uploader) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ProjectUploadImage(ctx context.Context, projectID uuid.UUID, file multipart.FileHeader, blobService cloud.Blob) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - p, err := qtx.GetProject(ctx, projectID) + p, err := qtx.ProjectGet(ctx, projectID) if err != nil { return err } - src, err := file.Open() if err != nil { return err } defer src.Close() - dst, err := os.Create(file.Filename) + fp := "/tmp/" + file.Filename + dst, err := os.Create(fp) if err != nil { return err } defer dst.Close() - + defer func() { + if err := os.Remove(fp); err != nil { + log.Printf("enable to remove file /tmp/%s", fp) + } + }() if err := img.Resize(src, dst, image.Rect(0, 0, 480, 480)); err != nil { return err } - - if err := qtx.UpdateProjectImage(ctx, file.Filename, projectID); err != nil { + if err := qtx.ProjectUpdateImage(ctx, db.ProjectUpdateImageParams{ + ID: projectID, + Image: &file.Filename, + }); err != nil { return err } - - if err := u(ctx, src, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { + if err := blobService.UploadContext(ctx, dst, "/projects/"+p.Slug+"/"+file.Filename, ""); err != nil { return err } - - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/service/project_role.go b/api/internal/service/project_role.go index dbdf0e22..c10078b5 100644 --- a/api/internal/service/project_role.go +++ b/api/internal/service/project_role.go @@ -3,51 +3,28 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" + 
"github.com/USACE/instrumentation-api/api/internal/db" ) -type ProjectRoleService interface { - ListProjectMembers(ctx context.Context, projectID uuid.UUID) ([]model.ProjectMembership, error) - GetProjectMembership(ctx context.Context, roleID uuid.UUID) (model.ProjectMembership, error) - AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) - RemoveProjectMemberRole(ctx context.Context, projectID, profileID, roleID uuid.UUID) error - IsProjectAdmin(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) - IsProjectMember(ctx context.Context, profileID, projectID uuid.UUID) (bool, error) -} - -type projectRoleService struct { - db *model.Database - *model.Queries -} - -func NewProjectRoleService(db *model.Database, q *model.Queries) *projectRoleService { - return &projectRoleService{db, q} -} - -// AddProjectMemberRole adds a role to a user for a specific project -func (s projectRoleService) AddProjectMemberRole(ctx context.Context, projectID, profileID, roleID, grantedBy uuid.UUID) (model.ProjectMembership, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ProfileProjectRoleCreate(ctx context.Context, arg db.ProfileProjectRoleCreateParams) (db.ProfileProjectRoleGetRow, error) { + var a db.ProfileProjectRoleGetRow + tx, err := s.db.Begin(ctx) if err != nil { - return model.ProjectMembership{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - pprID, err := qtx.AddProjectMemberRole(ctx, projectID, profileID, roleID, grantedBy) + newID, err := qtx.ProfileProjectRoleCreate(ctx, arg) if err != nil { - return model.ProjectMembership{}, err + return a, err } - - pm, err := qtx.GetProjectMembership(ctx, pprID) + a, err = qtx.ProfileProjectRoleGet(ctx, newID) if err != nil { - return model.ProjectMembership{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ProjectMembership{}, err + if err 
:= tx.Commit(ctx); err != nil { + return a, err } - - return pm, nil + return a, nil } diff --git a/api/internal/service/report_config.go b/api/internal/service/report_config.go index 1954c9c3..b0754f31 100644 --- a/api/internal/service/report_config.go +++ b/api/internal/service/report_config.go @@ -5,139 +5,171 @@ import ( "encoding/json" "github.com/USACE/instrumentation-api/api/internal/cloud" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type ReportConfigService interface { - ListProjectReportConfigs(ctx context.Context, projectID uuid.UUID) ([]model.ReportConfig, error) - CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) - UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error - DeleteReportConfig(ctx context.Context, rcID uuid.UUID) error - GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) - CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) - GetReportDownloadJob(ctx context.Context, jobID, profileID uuid.UUID) (model.ReportDownloadJob, error) - UpdateReportDownloadJob(ctx context.Context, j model.ReportDownloadJob) error -} - -type reportConfigService struct { - db *model.Database - *model.Queries - pubsub cloud.Pubsub - mockQueue bool -} - -func NewReportConfigService(db *model.Database, q *model.Queries, ps cloud.Pubsub, mockQueue bool) *reportConfigService { - return &reportConfigService{db, q, ps, mockQueue} -} - -func (s reportConfigService) CreateReportConfig(ctx context.Context, rc model.ReportConfig) (model.ReportConfig, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ReportConfigCreate(ctx context.Context, rc dto.ReportConfig) (db.VReportConfig, error) { + var a db.VReportConfig + tx, err := 
s.db.Begin(ctx) if err != nil { - return model.ReportConfig{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - rcID, err := qtx.CreateReportConfig(ctx, rc) + rcID, err := qtx.ReportConfigCreate(ctx, db.ReportConfigCreateParams{ + Name: rc.Name, + ProjectID: rc.ProjectID, + CreatedBy: rc.CreatedBy, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }) if err != nil { - return model.ReportConfig{}, err + return a, err } - - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rcID, pc.ID); err != nil { - return model.ReportConfig{}, err + args := make([]db.ReportConfigPlotConfigCreateBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.ReportConfigPlotConfigCreateBatchParams{ + ReportConfigID: rcID, + PlotConfigID: rc.PlotConfigs[idx].ID, } } - - rcNew, err := qtx.GetReportConfigByID(ctx, rcID) + qtx.ReportConfigPlotConfigCreateBatch(ctx, args).Exec(batchExecErr(&err)) if err != nil { - return model.ReportConfig{}, err + return a, err } - - if err := tx.Commit(); err != nil { - return model.ReportConfig{}, err + a, err = qtx.ReportConfigGet(ctx, rcID) + if err != nil { + return a, err } - return rcNew, nil + if err := tx.Commit(ctx); err != nil { + return a, err + } + return a, nil } -func (s reportConfigService) UpdateReportConfig(ctx context.Context, rc model.ReportConfig) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) ReportConfigUpdate(ctx context.Context, rc dto.ReportConfig) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer 
txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.UpdateReportConfig(ctx, rc); err != nil { + if err := qtx.ReportConfigUpdate(ctx, db.ReportConfigUpdateParams{ + ID: rc.ID, + Name: rc.Name, + UpdatedBy: rc.UpdatedBy, + Description: rc.Description, + DateRange: &rc.GlobalOverrides.DateRange.Value, + DateRangeEnabled: &rc.GlobalOverrides.DateRange.Enabled, + ShowMasked: &rc.GlobalOverrides.ShowMasked.Value, + ShowMaskedEnabled: &rc.GlobalOverrides.ShowMasked.Enabled, + ShowNonvalidated: &rc.GlobalOverrides.ShowNonvalidated.Value, + ShowNonvalidatedEnabled: &rc.GlobalOverrides.ShowNonvalidated.Enabled, + }); err != nil { return err } - if err := qtx.UnassignAllReportConfigPlotConfig(ctx, rc.ID); err != nil { + if err := qtx.ReportConfigPlotConfigDeleteForReportConfig(ctx, rc.ID); err != nil { return err } - for _, pc := range rc.PlotConfigs { - if err := qtx.AssignReportConfigPlotConfig(ctx, rc.ID, pc.ID); err != nil { - return err + args := make([]db.ReportConfigPlotConfigCreateBatchParams, len(rc.PlotConfigs)) + for idx := range rc.PlotConfigs { + args[idx] = db.ReportConfigPlotConfigCreateBatchParams{ + ReportConfigID: rc.ID, + PlotConfigID: rc.PlotConfigs[idx].ID, } } + qtx.ReportConfigPlotConfigCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } - return tx.Commit() + return tx.Commit(ctx) } -func (s reportConfigService) GetReportConfigWithPlotConfigs(ctx context.Context, rcID uuid.UUID) (model.ReportConfigWithPlotConfigs, error) { - q := s.db.Queries() +type ReportConfigWithPlotConfigs struct { + db.VReportConfig + PlotConfigs []db.VPlotConfiguration `json:"plot_configs"` +} - rc, err := q.GetReportConfigByID(ctx, rcID) +func (s DBService) ReportConfigWithPlotConfigsGet(ctx context.Context, rcID uuid.UUID) (ReportConfigWithPlotConfigs, error) { + var a ReportConfigWithPlotConfigs + rc, err := s.Queries.ReportConfigGet(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - pcs, err 
:= q.ListReportConfigPlotConfigs(ctx, rcID) + pcs, err := s.Queries.ReportConfigListForReportConfigWithPlotConfig(ctx, rcID) if err != nil { - return model.ReportConfigWithPlotConfigs{}, err + return a, err } - return model.ReportConfigWithPlotConfigs{ - ReportConfig: rc, - PlotConfigs: pcs, - }, nil + a.VReportConfig = rc + a.PlotConfigs = pcs + return a, nil } -func (s reportConfigService) CreateReportDownloadJob(ctx context.Context, rcID, profileID uuid.UUID, isLandscape bool) (model.ReportDownloadJob, error) { - tx, err := s.db.BeginTxx(ctx, nil) +type ReportDownloadJobCreateOpts struct { + ReportConfigID uuid.UUID + ProfileID uuid.UUID + IsLandscape bool + IsMock bool +} + +func (s DBService) ReportDownloadJobCreate(ctx context.Context, queue cloud.Pubsub, arg ReportDownloadJobCreateOpts) (db.ReportDownloadJob, error) { + var a db.ReportDownloadJob + tx, err := s.db.Begin(ctx) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - j, err := qtx.CreateReportDownloadJob(ctx, rcID, profileID) + + a, err = qtx.ReportDownloadJobCreate(ctx, db.ReportDownloadJobCreateParams{ + ReportConfigID: &arg.ReportConfigID, + CreatedBy: arg.ProfileID, + }) if err != nil { - return model.ReportDownloadJob{}, err + return a, err + } + msg := dto.ReportConfigJobMessage{ + ReportConfigID: arg.ReportConfigID, + JobID: a.ID, + IsLandscape: arg.IsLandscape, } - - msg := model.ReportConfigJobMessage{ReportConfigID: rcID, JobID: j.ID, IsLandscape: isLandscape} b, err := json.Marshal(msg) if err != nil { - return model.ReportDownloadJob{}, err + return a, err } - - // NOTE: Depending on how long this takes, possibly invoke the lambdas directly - if _, err := s.pubsub.PublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + if _, err := queue.PublishMessage(ctx, b); err != nil { + return a, err } - - if err := tx.Commit(); err != nil { - return 
model.ReportDownloadJob{}, err + if err := tx.Commit(ctx); err != nil { + return a, err } - - if s.mockQueue { - if _, err := s.pubsub.MockPublishMessage(ctx, b); err != nil { - return model.ReportDownloadJob{}, err + if arg.IsMock { + if _, err := queue.MockPublishMessage(ctx, b); err != nil { + return a, err } } + return a, nil +} - return j, nil +func (s DBService) ReportDownloadJobUpdate(ctx context.Context, j dto.ReportDownloadJob) error { + return s.Queries.ReportDownloadJobUpdate(ctx, db.ReportDownloadJobUpdateParams{ + ID: j.ID, + Status: db.JobStatus(j.Status), + Progress: int32(j.Progress), + ProgressUpdatedAt: j.ProgressUpdatedAt, + FileKey: j.FileKey, + FileExpiry: j.FileExpiry, + }) } diff --git a/api/internal/service/submittal.go b/api/internal/service/submittal.go deleted file mode 100644 index 3a73147c..00000000 --- a/api/internal/service/submittal.go +++ /dev/null @@ -1,27 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" - "github.com/google/uuid" -) - -type SubmittalService interface { - ListProjectSubmittals(ctx context.Context, projectID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListInstrumentSubmittals(ctx context.Context, instrumentID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID, showMissing bool) ([]model.Submittal, error) - ListUnverifiedMissingSubmittals(ctx context.Context) ([]model.Submittal, error) - UpdateSubmittal(ctx context.Context, sub model.Submittal) error - VerifyMissingSubmittal(ctx context.Context, submittalID uuid.UUID) error - VerifyMissingAlertConfigSubmittals(ctx context.Context, alertConfigID uuid.UUID) error -} - -type submittalService struct { - db *model.Database - *model.Queries -} - -func NewSubmittalService(db *model.Database, q *model.Queries) *submittalService { - return &submittalService{db, q} -} diff --git a/api/internal/service/timeseries.go 
b/api/internal/service/timeseries.go index 692c1126..2e479f9d 100644 --- a/api/internal/service/timeseries.go +++ b/api/internal/service/timeseries.go @@ -2,84 +2,54 @@ package service import ( "context" - "errors" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type TimeseriesService interface { - GetStoredTimeseriesExists(ctx context.Context, timeseriesID uuid.UUID) (bool, error) - AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error - ListProjectTimeseries(ctx context.Context, projectID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentTimeseries(ctx context.Context, instrumentID uuid.UUID) ([]model.Timeseries, error) - ListInstrumentGroupTimeseries(ctx context.Context, instrumentGroupID uuid.UUID) ([]model.Timeseries, error) - GetTimeseries(ctx context.Context, timeseriesID uuid.UUID) (model.Timeseries, error) - CreateTimeseries(ctx context.Context, ts model.Timeseries) (model.Timeseries, error) - CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) - UpdateTimeseries(ctx context.Context, ts model.Timeseries) (uuid.UUID, error) - DeleteTimeseries(ctx context.Context, timeseriesID uuid.UUID) error -} - -type timeseriesService struct { - db *model.Database - *model.Queries -} - -func NewTimeseriesService(db *model.Database, q *model.Queries) *timeseriesService { - return ×eriesService{db, q} -} - -func (s timeseriesService) CreateTimeseriesBatch(ctx context.Context, tt []model.Timeseries) ([]model.Timeseries, error) { - tx, err := s.db.BeginTxx(ctx, nil) - if err != nil { - return nil, err - } - defer model.TxDo(tx.Rollback) - - qtx := s.WithTx(tx) - - uu := make([]model.Timeseries, len(tt)) +func (s DBService) TimeseriesCreateBatch(ctx context.Context, tt []dto.Timeseries) error { + uu := 
make([]db.TimeseriesCreateBatchParams, len(tt)) for idx, ts := range tt { - ts.Type = model.StandardTimeseriesType - tsNew, err := qtx.CreateTimeseries(ctx, ts) - if err != nil { - return nil, err + if ts.ParameterID == uuid.Nil { + ts.ParameterID = dto.UnknownParameterID + } + if ts.UnitID == uuid.Nil { + ts.UnitID = dto.UnknownUnitID + } + uu[idx] = db.TimeseriesCreateBatchParams{ + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + Type: db.TimeseriesTypeStandard, } - uu[idx] = tsNew - } - - if err := tx.Commit(); err != nil { - return nil, err } - - return uu, nil + var err error + s.Queries.TimeseriesCreateBatch(ctx, uu).QueryRow(batchQueryRowErr[db.TimeseriesCreateBatchRow](&err)) + return err } -func (s timeseriesService) AssertTimeseriesLinkedToProject(ctx context.Context, projectID uuid.UUID, dd map[uuid.UUID]struct{}) error { - ddc := make(map[uuid.UUID]struct{}, len(dd)) - dds := make([]uuid.UUID, len(dd)) - idx := 0 - for k := range ddc { - ddc[k] = struct{}{} - dds[idx] = k - idx++ - } - - q := s.db.Queries() - - m, err := q.GetTimeseriesProjectMap(ctx, dds) +func (s DBService) TimeseriesCreate(ctx context.Context, ts dto.Timeseries) (uuid.UUID, error) { + tsNew, err := s.Queries.TimeseriesCreate(ctx, db.TimeseriesCreateParams{ + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + Type: db.TimeseriesTypeStandard, + }) if err != nil { - return err + return uuid.Nil, err } - for tID := range ddc { - ppID, ok := m[tID] - if ok && ppID == projectID { - delete(ddc, tID) - } - } - if len(ddc) != 0 { - return errors.New("instruments for all timeseries must be linked to project") - } - return nil + return tsNew.ID, nil +} + +func (s DBService) TimeseriesUpdate(ctx context.Context, ts dto.Timeseries) error { + return s.Queries.TimeseriesUpdate(ctx, db.TimeseriesUpdateParams{ + ID: ts.ID, + InstrumentID: &ts.InstrumentID, + Name: ts.Name, + ParameterID: 
ts.ParameterID, + UnitID: ts.UnitID, + }) } diff --git a/api/internal/service/timeseries_calculated.go b/api/internal/service/timeseries_calculated.go index 5c55a08c..57146c32 100644 --- a/api/internal/service/timeseries_calculated.go +++ b/api/internal/service/timeseries_calculated.go @@ -2,98 +2,67 @@ package service import ( "context" - "database/sql" - "errors" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type CalculatedTimeseriesService interface { - GetAllCalculatedTimeseriesForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]model.CalculatedTimeseries, error) - CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error - UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error - DeleteCalculatedTimeseries(ctx context.Context, ctsID uuid.UUID) error -} - -type calculatedTimeseriesService struct { - db *model.Database - *model.Queries -} - -func NewCalculatedTimeseriesService(db *model.Database, q *model.Queries) *calculatedTimeseriesService { - return &calculatedTimeseriesService{db, q} -} - -func (s calculatedTimeseriesService) CreateCalculatedTimeseries(ctx context.Context, cc model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesComputedCreate(ctx context.Context, ct dto.CalculatedTimeseries) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - tsID, err := qtx.CreateCalculatedTimeseries(ctx, cc) + tsID, err := qtx.TimeseriesComputedCreate(ctx, db.TimeseriesComputedCreateParams{ + InstrumentID: &ct.InstrumentID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + }) if err != nil { return err } - - if err := qtx.CreateCalculation(ctx, tsID, cc.Formula); err != nil { 
- return err - } - - if err := tx.Commit(); err != nil { + if err := qtx.CalculationCreate(ctx, db.CalculationCreateParams{ + TimeseriesID: tsID, + Contents: &ct.Formula, + }); err != nil { return err } - - return nil + return tx.Commit(ctx) } -func (s calculatedTimeseriesService) UpdateCalculatedTimeseries(ctx context.Context, cts model.CalculatedTimeseries) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesComputedUpdate(ctx context.Context, ct dto.CalculatedTimeseries) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - defaultCts, err := qtx.GetOneCalculation(ctx, &cts.ID) - if err != nil { - return err - } - - if cts.InstrumentID == uuid.Nil { - cts.InstrumentID = defaultCts.InstrumentID - } - if cts.ParameterID == uuid.Nil { - cts.ParameterID = defaultCts.ParameterID - } - if cts.UnitID == uuid.Nil { - cts.UnitID = defaultCts.UnitID - } - if cts.Slug == "" { - cts.Slug = defaultCts.Slug + if ct.ParameterID == uuid.Nil { + ct.ParameterID = dto.UnknownParameterID } - if cts.FormulaName == "" { - cts.FormulaName = defaultCts.FormulaName - } - if cts.Formula == "" { - cts.Formula = defaultCts.Formula - } - - if err := qtx.CreateOrUpdateCalculatedTimeseries(ctx, cts, defaultCts); err != nil && !errors.Is(err, sql.ErrNoRows) { - return err + if ct.UnitID == uuid.Nil { + ct.UnitID = dto.UnknownUnitID } - if err := qtx.CreateOrUpdateCalculation(ctx, cts.ID, cts.Formula, defaultCts.Formula); err != nil && !errors.Is(err, sql.ErrNoRows) { + if err := qtx.TimeseriesComputedUpdate(ctx, db.TimeseriesComputedUpdateParams{ + ID: ct.ID, + ParameterID: ct.ParameterID, + UnitID: ct.UnitID, + Name: ct.FormulaName, + Slug: ct.Slug, + }); err != nil { return err } - - if err := tx.Commit(); err != nil { + if err := qtx.CalculationUpdate(ctx, db.CalculationUpdateParams{ + TimeseriesID: ct.ID, + Contents: &ct.Formula, + }); err != nil { return err } - - 
return nil + return tx.Commit(ctx) } diff --git a/api/internal/service/timeseries_cwms.go b/api/internal/service/timeseries_cwms.go index b2d0f2d0..477269e0 100644 --- a/api/internal/service/timeseries_cwms.go +++ b/api/internal/service/timeseries_cwms.go @@ -3,70 +3,78 @@ package service import ( "context" - "github.com/USACE/instrumentation-api/api/internal/model" + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" "github.com/google/uuid" ) -type TimeseriesCwmsService interface { - ListTimeseriesCwms(ctx context.Context, instrumentID uuid.UUID) ([]model.TimeseriesCwms, error) - CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) - UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error -} - -type timeseriesCwmsService struct { - db *model.Database - *model.Queries -} - -func NewTimeseriesCwmsService(db *model.Database, q *model.Queries) *timeseriesCwmsService { - return ×eriesCwmsService{db, q} -} - -func (s timeseriesCwmsService) CreateTimeseriesCwmsBatch(ctx context.Context, instrumentID uuid.UUID, tcc []model.TimeseriesCwms) ([]model.TimeseriesCwms, error) { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesCwmsCreateBatch(ctx context.Context, instrumentID uuid.UUID, tcc []dto.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { - return tcc, err + return err } - defer model.TxDo(tx.Rollback) - + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for idx := range tcc { - tcc[idx].Type = model.CwmsTimeseriesType - tcc[idx].InstrumentID = instrumentID - tsNew, err := qtx.CreateTimeseries(ctx, tcc[idx].Timeseries) + for idx, tc := range tcc { + if tc.ParameterID == uuid.Nil { + tc.ParameterID = dto.UnknownParameterID + } + if tc.UnitID == uuid.Nil { + tc.UnitID = dto.UnknownUnitID + } + tcc[idx].Type = dto.CwmsTimeseriesType + tsNew, err := qtx.TimeseriesCreate(ctx, 
db.TimeseriesCreateParams{ + InstrumentID: &instrumentID, + Name: tc.Name, + ParameterID: tc.ParameterID, + UnitID: tc.UnitID, + Type: db.TimeseriesTypeCwms, + }) if err != nil { - return tcc, err + return err } - tcc[idx].Timeseries = tsNew - if err := qtx.CreateTimeseriesCwms(ctx, tcc[idx]); err != nil { - return tcc, err + if err := qtx.TimeseriesCwmsCreate(ctx, db.TimeseriesCwmsCreateParams{ + TimeseriesID: tsNew.ID, + CwmsTimeseriesID: tc.CwmsTimeseriesID, + CwmsOfficeID: tc.CwmsOfficeID, + CwmsExtentEarliestTime: tc.CwmsExtentEarliestTime, + CwmsExtentLatestTime: tc.CwmsExtentLatestTime, + }); err != nil { + return err } } - - if err := tx.Commit(); err != nil { - return tcc, err - } - - return tcc, nil + return tx.Commit(ctx) } -func (s timeseriesCwmsService) UpdateTimeseriesCwms(ctx context.Context, tsCwms model.TimeseriesCwms) error { - tx, err := s.db.BeginTxx(ctx, nil) +func (s DBService) TimeseriesCwmsUpdate(ctx context.Context, ts dto.TimeseriesCwms) error { + tx, err := s.db.Begin(ctx) if err != nil { return err } - defer model.TxDo(tx.Rollback) + defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if _, err := qtx.UpdateTimeseries(ctx, tsCwms.Timeseries); err != nil { + if err := qtx.TimeseriesUpdate(ctx, db.TimeseriesUpdateParams{ + ID: ts.ID, + Name: ts.Name, + InstrumentID: &ts.InstrumentID, + ParameterID: ts.ParameterID, + UnitID: ts.UnitID, + }); err != nil { return err } - if err := qtx.UpdateTimeseriesCwms(ctx, tsCwms); err != nil { + if err := qtx.TimeseriesCwmsUpdate(ctx, db.TimeseriesCwmsUpdateParams{ + TimeseriesID: ts.ID, + CwmsTimeseriesID: ts.CwmsTimeseriesID, + CwmsOfficeID: ts.CwmsOfficeID, + CwmsExtentEarliestTime: ts.CwmsExtentEarliestTime, + CwmsExtentLatestTime: ts.CwmsExtentLatestTime, + }); err != nil { return err } - return tx.Commit() + return tx.Commit(ctx) } diff --git a/api/internal/service/timeseries_process.go b/api/internal/service/timeseries_process.go deleted file mode 100644 index 671a2ce5..00000000 --- 
a/api/internal/service/timeseries_process.go +++ /dev/null @@ -1,21 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type ProcessTimeseriesService interface { - SelectMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessTimeseriesResponseCollection, error) - SelectInclinometerMeasurements(ctx context.Context, f model.ProcessMeasurementFilter) (model.ProcessInclinometerTimeseriesResponseCollection, error) -} - -type processTimeseriesService struct { - db *model.Database - *model.Queries -} - -func NewProcessTimeseriesService(db *model.Database, q *model.Queries) *processTimeseriesService { - return &processTimeseriesService{db, q} -} diff --git a/api/internal/service/unit.go b/api/internal/service/unit.go deleted file mode 100644 index efea81bf..00000000 --- a/api/internal/service/unit.go +++ /dev/null @@ -1,20 +0,0 @@ -package service - -import ( - "context" - - "github.com/USACE/instrumentation-api/api/internal/model" -) - -type UnitService interface { - ListUnits(ctx context.Context) ([]model.Unit, error) -} - -type unitService struct { - db *model.Database - *model.Queries -} - -func NewUnitService(db *model.Database, q *model.Queries) *unitService { - return &unitService{db, q} -} diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go new file mode 100644 index 00000000..1cf45c81 --- /dev/null +++ b/api/internal/service/uploader.go @@ -0,0 +1,124 @@ +package service + +import ( + "context" + "errors" + "io" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +func (s DBService) UploaderConfigCreate(ctx context.Context, uc dto.UploaderConfig) (uuid.UUID, error) { + return s.Queries.UploaderConfigCreate(ctx, db.UploaderConfigCreateParams{ + ProjectID: uc.ProjectID, + Name: uc.Name, + Description: uc.Description, + Type: 
db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + CreatedBy: uc.CreatedBy, + CreatedAt: uc.CreatedAt, + TimeField: uc.TimeField, + ValidatedFieldEnabled: uc.ValidatedFieldEnabled, + ValidatedField: uc.ValidatedField, + MaskedFieldEnabled: uc.MaskedFieldEnabled, + MaskedField: uc.MaskedField, + CommentFieldEnabled: uc.CommentFieldEnabled, + CommentField: uc.CommentField, + ColumnOffset: uc.ColumnOffset, + RowOffset: uc.RowOffset, + }) +} + +func (s DBService) UploaderConfigUpdate(ctx context.Context, uc dto.UploaderConfig) error { + return s.Queries.UploaderConfigUpdate(ctx, db.UploaderConfigUpdateParams{ + ID: uc.ID, + Name: uc.Name, + Description: uc.Description, + Type: db.UploaderConfigType(uc.Type), + TzName: uc.TzName, + UpdatedBy: uc.UpdatedBy, + UpdatedAt: uc.UpdatedAt, + TimeField: uc.TimeField, + ValidatedFieldEnabled: uc.ValidatedFieldEnabled, + ValidatedField: uc.ValidatedField, + MaskedFieldEnabled: uc.MaskedFieldEnabled, + MaskedField: uc.MaskedField, + CommentFieldEnabled: uc.CommentFieldEnabled, + CommentField: uc.CommentField, + ColumnOffset: uc.ColumnOffset, + RowOffset: uc.RowOffset, + }) +} + +func (s DBService) UploaderConfigMappingCreateBatch(ctx context.Context, ucID uuid.UUID, mm []dto.UploaderConfigMapping) error { + args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.UploaderConfigMappingCreateBatchParams{ + UploaderConfigID: ucID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, + } + } + var err error + s.Queries.UploaderConfigMappingCreateBatch(ctx, args).Exec(batchExecErr(&err)) + return err +} + +func (s DBService) UploaderConfigMappingUpdateBatch(ctx context.Context, ucID uuid.UUID, mm []dto.UploaderConfigMapping) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.UploaderConfigMappingDeleteForUploaderConfig(ctx, ucID); err != nil { + return err + } + + args := 
make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) + for idx, m := range mm { + args[idx] = db.UploaderConfigMappingCreateBatchParams{ + UploaderConfigID: ucID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, + } + } + qtx.UploaderConfigMappingCreateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return err + } + return tx.Commit(ctx) +} + +func (s DBService) UploaderConfigUploadFile(ctx context.Context, projectID, ucID uuid.UUID, r io.Reader) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + uc, err := qtx.UploaderConfigGet(ctx, ucID) + if err != nil { + return err + } + ucm, err := qtx.UploaderConfigMappingList(ctx, uc.ID) + if err != nil { + return err + } + switch uc.Type { + case db.UploaderConfigTypeCsv: + err = s.TimeseriesMeasurementCreateBatchFromCSVFile(ctx, r, uc, ucm) + case db.UploaderConfigTypeToa5: + err = s.TimeseriesMeasurementCreateBatchFromTOA5File(ctx, r, uc, ucm) + case db.UploaderConfigTypeDux: + err = s.TimeseriesMeasurementCreateBatchFromDuxFile(ctx, r, uc, ucm) + default: + return errors.New("not implemented") + } + + return tx.Commit(ctx) +} diff --git a/api/internal/service/uploader_parser.go b/api/internal/service/uploader_parser.go new file mode 100644 index 00000000..c0325863 --- /dev/null +++ b/api/internal/service/uploader_parser.go @@ -0,0 +1,228 @@ +package service + +import ( + "context" + "encoding/csv" + "errors" + "io" + "math" + "strconv" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +type timeseriesMeasurementNoteCreateOrUpdateBatchParams struct { + fields []string + fieldNameTimeseriesIDMap map[string]uuid.UUID + maskedColIdx int + validatedColIdx int + commentColIdx int + timezone string + timeFieldIdx int + colOffset int + rowOffset int +} + +// non-datalogger toa5/dat parser (use uploader config) +func (s 
DBService) TimeseriesMeasurementCreateBatchFromTOA5File(ctx context.Context, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { + reader := csv.NewReader(r) + _, err := reader.Read() + if err != nil { + return err + } + fieldHeader, err := reader.Read() + if err != nil { + return err + } + _, err = reader.Read() + if err != nil { + return err + } + _, err = reader.Read() + if err != nil { + return err + } + + // first two columns are timestamp and record number + // we only want to collect the measurement fields here + fields := make([]string, len(fieldHeader)-2) + for i := 2; i < len(fieldHeader); i++ { + fields[i-2] = fieldHeader[i] + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, mapping := range ucm { + if mapping.TimeseriesID == nil { + continue + } + fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, s.Queries, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + maskedColIdx: -1, + validatedColIdx: -1, + commentColIdx: -1, + timezone: uc.TzName, + colOffset: 2, + }); err != nil { + return err + } + return nil +} + +// custom csv parser +func (s DBService) TimeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { + reader := csv.NewReader(r) + fieldHeader, err := reader.Read() + if err != nil { + return err + } + + timeFieldIdx := -1 + + fields := make([]string, len(fieldHeader)) + for idx := range fieldHeader { + header := fieldHeader[idx] + switch { + case uc.TimeField == header: + timeFieldIdx = idx + default: + fields[idx] = header + } + } + + if timeFieldIdx == -1 { + return errors.New("time field specified in uploader config does not exist") + } + + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, mapping := range ucm { + if mapping.TimeseriesID == nil { + continue + } +
fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID + } + + if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, s.Queries, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ + fields: fields, + fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + maskedColIdx: -1, + validatedColIdx: -1, + commentColIdx: -1, + timezone: uc.TzName, + timeFieldIdx: timeFieldIdx, + }); err != nil { + return err + } + return nil +} + +func (s DBService) TimeseriesMeasurementCreateBatchFromDuxFile(ctx context.Context, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { + return errors.New("not implemented") +} + +func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Queries, csvReader *csv.Reader, arg timeseriesMeasurementNoteCreateOrUpdateBatchParams) error { + chunkSize := 1_000 + createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams, chunkSize) + createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams, chunkSize) + var mmtIdx, noteIdx int + for { + record, err := csvReader.Read() + if err == io.EOF { + break + } + if err != nil { + return err + } + + // TODO: do we need to accept other time formats?
For now RFC3339 only + t, err := time.Parse(time.RFC3339, record[arg.timeFieldIdx]) + if err != nil { + return err + } + + // get notes content and apply to all timeseries in loop + // TODO: this is not space efficient and should change to a range-based approach + hasNotes := arg.maskedColIdx != -1 || arg.validatedColIdx != -1 || arg.commentColIdx != -1 + + for idx, cell := range record[arg.colOffset:] { + fieldName := arg.fields[idx] + tsID, ok := arg.fieldNameTimeseriesIDMap[fieldName] + if !ok { + continue + } + v, err := strconv.ParseFloat(cell, 64) + if err != nil { + continue + } + createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ + TimeseriesID: tsID, + LocalTime: pgtype.Timestamp{Time: t, Valid: true}, + Timezone: arg.timezone, + Value: v, + } + mmtIdx++ + if mmtIdx == chunkSize { + var err error + q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) + if err != nil { + return err + } + mmtIdx = 0 + } + + // automask NaN values + hasNaN := math.IsNaN(v) || math.IsInf(v, 0) + if hasNotes || hasNaN { + var masked *bool + var validated *bool + var comment *string + if arg.maskedColIdx != -1 { + maskedVal := strings.ToLower(record[arg.maskedColIdx]) == "true" + masked = &maskedVal + } else if hasNaN { + masked = &hasNaN + } + if arg.validatedColIdx != -1 { + validatedVal := strings.ToLower(record[arg.validatedColIdx]) == "true" + validated = &validatedVal + } + if arg.commentColIdx != -1 { + commentVal := record[arg.commentColIdx] + comment = &commentVal + } + createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams{ + TimeseriesID: tsID, + LocalTime: pgtype.Timestamp{Time: t, Valid: true}, + Timezone: arg.timezone, + Masked: masked, + Validated: validated, + Annotation: comment, + } + noteIdx++ + if noteIdx == chunkSize { + var err error + q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams).Exec(batchExecErr(&err)) + if
err != nil { + return err + } + noteIdx = 0 + } + } + } + } + if mmtIdx != 0 { + var err error + q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + if noteIdx != 0 { + var err error + q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err)) + if err != nil { + return err + } + } + return nil +} diff --git a/api/internal/model/timewindow.go b/api/internal/util/timewindow.go similarity index 98% rename from api/internal/model/timewindow.go rename to api/internal/util/timewindow.go index 39e37f40..506d938e 100644 --- a/api/internal/model/timewindow.go +++ b/api/internal/util/timewindow.go @@ -1,4 +1,4 @@ -package model +package util import "time" diff --git a/api/migrations/repeat/0020__views_profiles.sql b/api/migrations/repeat/0020__views_profiles.sql index e6a06c32..6f4e4565 100644 --- a/api/migrations/repeat/0020__views_profiles.sql +++ b/api/migrations/repeat/0020__views_profiles.sql @@ -15,9 +15,18 @@ CREATE OR REPLACE VIEW v_profile AS ( p.display_name, p.email, p.is_admin, - COALESCE(r.roles,'{}') AS roles + COALESCE(r.roles,'{}')::text[] AS roles, + COALESCE(sq.tokens, '[]'::jsonb) AS tokens FROM profile p LEFT JOIN roles_by_profile r ON r.profile_id = p.id + LEFT JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object( + 'token_id', pt.token_id, + 'issued', pt.issued + )) tokens + FROM profile_token pt + WHERE pt.profile_id = p.id + ) sq ON true ); CREATE OR REPLACE VIEW v_profile_project_roles AS ( diff --git a/api/migrations/repeat/0030__views_projects.sql b/api/migrations/repeat/0030__views_projects.sql index 199c6af6..e091f11a 100644 --- a/api/migrations/repeat/0030__views_projects.sql +++ b/api/migrations/repeat/0030__views_projects.sql @@ -5,23 +5,22 @@ CREATE OR REPLACE VIEW v_project AS ( CASE WHEN p.image IS NOT NULL THEN cfg.static_host || '/projects/' || p.slug || '/images/' || p.image ELSE NULL - END AS 
image, + END image, p.district_id, d.office_id, - p.deleted, p.slug, p.name, - p.creator, - u.username AS creator_username, - p.create_date, - p.updater, - u.username AS updater_username, - p.update_date, - COALESCE(i.count, 0) AS instrument_count, - COALESCE(g.count, 0) AS instrument_group_count + p.created_by, + COALESCE(u.username, 'midas') created_by_username, + p.created_at, + p.updated_by, + u.username updated_by_username, + p.updated_at, + COALESCE(i.count, 0) instrument_count, + COALESCE(g.count, 0) instrument_group_count FROM project p - LEFT JOIN profile c ON p.creator = c.id - LEFT JOIN profile u ON p.updater = c.id + LEFT JOIN profile c ON p.created_by = c.id + LEFT JOIN profile u ON p.updated_by = c.id LEFT JOIN ( SELECT pi.project_id, COUNT(pi.*) as count FROM project_instrument pi @@ -42,18 +41,21 @@ CREATE OR REPLACE VIEW v_project AS ( SELECT static_host FROM config LIMIT 1 ) cfg ON true + WHERE NOT deleted + ORDER BY name ); CREATE OR REPLACE VIEW v_district AS ( SELECT - ag.name AS agency, - dis.id AS id, - dis.name AS name, - dis.initials AS initials, - div.name AS division_name, - div.initials AS division_initials, - dis.office_id AS office_id + ag.name agency, + dis.id, + dis.name, + dis.initials, + div.name division_name, + div.initials division_initials, + dis.office_id FROM district dis INNER JOIN division div ON dis.division_id = div.id INNER JOIN agency ag ON ag.id = div.agency_id + ORDER BY ag.name, div.name, dis.name ); diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index 3d9fdedd..3832c27e 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -1,9 +1,9 @@ CREATE OR REPLACE VIEW v_instrument_telemetry AS ( SELECT a.id, - a.instrument_id AS instrument_id, - b.id AS telemetry_type_id, - b.slug AS telemetry_type_slug, - b.name AS telemetry_type_name + a.instrument_id, + b.id telemetry_type_id, + b.slug 
telemetry_type_slug, + b.name telemetry_type_name FROM instrument_telemetry a INNER JOIN telemetry_type b ON b.id = a.telemetry_type_id LEFT JOIN telemetry_goes tg ON a.telemetry_id = tg.id @@ -13,7 +13,6 @@ CREATE OR REPLACE VIEW v_instrument_telemetry AS ( CREATE OR REPLACE VIEW v_instrument AS ( SELECT i.id, - i.deleted, s.status_id, s.status, s.status_time, @@ -21,29 +20,29 @@ CREATE OR REPLACE VIEW v_instrument AS ( i.name, i.type_id, i.show_cwms_tab, - t.name AS type, - t.icon AS icon, - ST_AsBinary(i.geometry) AS geometry, + t.name "type", + t.icon, + ST_AsGeoJSON(i.geometry)::json geometry, i.station, - i.station_offset, - i.creator, - i.create_date, - i.updater, - i.update_date, + i.station_offset "offset", + i.created_by, + i.created_at, + i.updated_by, + i.updated_at, i.nid_id, i.usgs_id, - tel.telemetry AS telemetry, + tel.telemetry, cwms.has_cwms, - COALESCE(op.parr::TEXT, '[]'::TEXT) AS projects, - COALESCE(c.constants, '{}') AS constants, - COALESCE(g.groups, '{}') AS groups, - COALESCE(a.alert_configs, '{}') AS alert_configs, - COALESCE(o.opts, '{}'::JSON)::TEXT AS opts + COALESCE(op.parr, '[]'::jsonb) projects, + COALESCE(c.constants, '{}')::uuid[] constants, + COALESCE(g.groups, '{}')::uuid[] groups, + COALESCE(a.alert_configs, '{}')::uuid[] alert_configs, + COALESCE(o.opts, '{}'::jsonb) opts FROM instrument i INNER JOIN instrument_type t ON t.id = i.type_id LEFT JOIN LATERAL ( SELECT - JSON_AGG(JSON_BUILD_OBJECT( + jsonb_agg(jsonb_build_object( 'id', p.id, 'name', p.name, 'slug', p.slug @@ -75,14 +74,14 @@ CREATE OR REPLACE VIEW v_instrument AS ( ) c ON c.instrument_id = i.id LEFT JOIN ( SELECT - ARRAY_AGG(instrument_group_id) as groups, + array_agg(instrument_group_id) as groups, instrument_id FROM instrument_group_instruments GROUP BY instrument_id ) g ON g.instrument_id = i.id LEFT JOIN ( SELECT - ARRAY_AGG(alert_config_id) as alert_configs, + array_agg(alert_config_id) as alert_configs, instrument_id FROM alert_config_instrument GROUP BY 
instrument_id @@ -90,7 +89,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LEFT JOIN ( SELECT instrument_id, - JSON_AGG(JSON_BUILD_OBJECT( + jsonb_agg(jsonb_build_object( 'id', v.id, 'slug', v.telemetry_type_slug, 'name', v.telemetry_type_name @@ -108,7 +107,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LEFT JOIN ( -- optional properties that vary per -- instrument can be added here via union - SELECT o1.instrument_id, (ROW_TO_JSON(o1)::JSONB || ROW_TO_JSON(b1)::JSONB)::JSON AS opts + SELECT o1.instrument_id, (ROW_TO_JSON(o1)::jsonb || row_to_json(b1)::jsonb) AS opts FROM saa_opts o1 LEFT JOIN LATERAL ( SELECT value AS bottom_elevation FROM timeseries_measurement m @@ -117,7 +116,7 @@ CREATE OR REPLACE VIEW v_instrument AS ( LIMIT 1 ) b1 ON true UNION ALL - SELECT o2.instrument_id, (ROW_TO_JSON(o2)::JSONB || ROW_TO_JSON(b2)::JSONB)::JSON AS opts + SELECT o2.instrument_id, (ROW_TO_JSON(o2)::jsonb || row_to_json(b2)::jsonb) AS opts FROM ipi_opts o2 LEFT JOIN LATERAL ( SELECT value AS bottom_elevation FROM timeseries_measurement m @@ -126,37 +125,48 @@ CREATE OR REPLACE VIEW v_instrument AS ( LIMIT 1 ) b2 ON true ) o ON o.instrument_id = i.id + WHERE NOT i.deleted ); CREATE OR REPLACE VIEW v_instrument_group AS ( WITH instrument_count AS ( - SELECT - igi.instrument_group_id, - count(igi.instrument_group_id) as i_count - FROM instrument_group_instruments igi - JOIN instrument i on igi.instrument_id = i.id and not i.deleted - GROUP BY igi.instrument_group_id - ), - timeseries_instruments as ( - SELECT t.id, t.instrument_id, igi.instrument_group_id from timeseries t - JOIN instrument i on i.id = t.instrument_id and not i.deleted - JOIN instrument_group_instruments igi on igi.instrument_id = i.id - ) - SELECT ig.id, - ig.slug, - ig.name, - ig.description, - ig.creator, - ig.create_date, - ig.updater, - ig.update_date, - ig.project_id, - ig.deleted, - COALESCE(ic.i_count,0) as instrument_count, - COALESCE(count(ti.id),0) as timeseries_count - FROM instrument_group ig - LEFT 
JOIN instrument_count ic on ic.instrument_group_id = ig.id - LEFT JOIN timeseries_instruments ti on ig.id = ti.instrument_group_id - GROUP BY ig.id, ic.i_count - ORDER BY ig.name + SELECT + igi.instrument_group_id, + count(igi.instrument_group_id) as i_count + FROM instrument_group_instruments igi + JOIN instrument i on igi.instrument_id = i.id and not i.deleted + GROUP BY igi.instrument_group_id + ), + timeseries_instruments as ( + SELECT t.id, t.instrument_id, igi.instrument_group_id from timeseries t + JOIN instrument i on i.id = t.instrument_id and not i.deleted + JOIN instrument_group_instruments igi on igi.instrument_id = i.id + ) + SELECT ig.id, + ig.slug, + ig.name, + ig.description, + ig.created_by, + ig.created_at, + ig.updated_by, + ig.updated_at, + ig.project_id, + COALESCE(ic.i_count,0) as instrument_count, + COALESCE(count(ti.id),0) as timeseries_count + FROM instrument_group ig + LEFT JOIN instrument_count ic on ic.instrument_group_id = ig.id + LEFT JOIN timeseries_instruments ti on ig.id = ti.instrument_group_id + GROUP BY ig.id, ic.i_count + ORDER BY ig.name +); + +CREATE OR REPLACE VIEW v_instrument_status AS ( + SELECT + s1.id, + s1.instrument_id, + s1.status_id, + s2.name status, + s1.time + FROM instrument_status s1 + INNER JOIN status s2 ON s1.status_id = s2.id ); diff --git a/api/migrations/repeat/0050__views_timeseries.sql b/api/migrations/repeat/0050__views_timeseries.sql index 9b5c7cff..01b4815d 100644 --- a/api/migrations/repeat/0050__views_timeseries.sql +++ b/api/migrations/repeat/0050__views_timeseries.sql @@ -80,3 +80,40 @@ CREATE OR REPLACE VIEW v_timeseries_cwms AS ( FROM v_timeseries ts INNER JOIN timeseries_cwms tc ON ts.id = tc.timeseries_id ); + +CREATE OR REPLACE VIEW v_collection_group_details AS ( + SELECT + cg.*, + ts.timeseries + FROM collection_group cg + LEFT JOIN LATERAL ( + SELECT COALESCE(jsonb_agg(to_jsonb(t.*) || jsonb_build_object( + 'latest_time', tm.time, + 'latest_value', tm.value, + 'sort_order', cgt.sort_order 
+ ) ORDER BY sort_order ASC, t.name ASC), '[]'::jsonb) timeseries + FROM collection_group_timeseries cgt + LEFT JOIN v_timeseries t on t.id = cgt.timeseries_id + LEFT JOIN LATERAL ( + SELECT tmm.time, tmm.value FROM timeseries_measurement tmm + WHERE tmm.timeseries_id = t.id + ORDER BY tmm.time DESC LIMIT 1 + ) tm ON true + WHERE cgt.collection_group_id = cg.id + ) ts ON true +); + + +CREATE OR REPLACE VIEW v_timeseries_measurement AS ( + SELECT + m.timeseries_id, + m.time, + m.value, + n.masked, + n.validated, + n.annotation + FROM timeseries_measurement m + LEFT JOIN timeseries_notes n ON m.timeseries_id = n.timeseries_id AND m.time = n.time + INNER JOIN timeseries t on t.id = m.timeseries_id + ORDER BY m.time ASC +); diff --git a/api/migrations/repeat/0060__views_alerts.sql b/api/migrations/repeat/0060__views_alerts.sql index 214bd728..1a1780a9 100644 --- a/api/migrations/repeat/0060__views_alerts.sql +++ b/api/migrations/repeat/0060__views_alerts.sql @@ -1,18 +1,18 @@ CREATE OR REPLACE VIEW v_alert AS ( SELECT a.id AS id, a.alert_config_id AS alert_config_id, - a.create_date AS create_date, + a.created_at AS created_at, p.id AS project_id, p.name AS project_name, ac.name AS name, ac.body AS body, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( + SELECT COALESCE(jsonb_agg(jsonb_build_object( 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT iac.instrument_id FROM alert_config_instrument iac WHERE iac.alert_config_id = ac.id @@ -25,51 +25,52 @@ CREATE OR REPLACE VIEW v_alert AS ( CREATE OR REPLACE VIEW v_alert_config AS ( SELECT - ac.id AS id, - ac.name AS name, - ac.body AS body, - prf1.id AS creator, - COALESCE(prf1.username, 'midas') AS creator_username, - ac.create_date AS create_date, - prf2.id AS updater, - prf2.username AS updater_username, - ac.update_date AS update_date, - prj.id AS project_id, - prj.name AS project_name, - atype.id AS 
alert_type_id, - atype.name AS alert_type, - ac.start_date AS start_date, - ac.schedule_interval::text AS schedule_interval, - ac.mute_consecutive_alerts AS mute_consecutive_alerts, - ac.remind_interval::text AS remind_interval, - ac.warning_interval::text AS warning_interval, - ac.last_checked AS last_checked, - ac.last_reminded AS last_reminded, + ac.id, + ac.name, + ac.body, + prf1.id created_by, + COALESCE(prf1.username, 'midas') created_by_username, + ac.created_at, + prf2.id updated_by, + prf2.username updated_by_username, + ac.updated_at, + prj.id project_id, + prj.name project_name, + atype.id alert_type_id, + atype.name alert_type, + ac.started_at, + ac.schedule_interval::text schedule_interval, + ac.mute_consecutive_alerts, + ac.remind_interval::text remind_interval, + ac.warning_interval::text warning_interval, + ac.last_checked_at, + ac.last_reminded_at, + null::timestamptz create_next_submittal_from, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'instrument_id', id, + SELECT COALESCE(jsonb_agg(jsonb_build_object( + 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT iac.instrument_id - FROM alert_config_instrument iac - WHERE iac.alert_config_id = ac.id + FROM alert_config_instrument iac + WHERE iac.alert_config_id = ac.id ) - ) AS instruments, + ) AS instruments, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'id', id, + SELECT COALESCE(jsonb_agg(jsonb_build_object( + 'id', id, 'user_type', user_type, 'username', username, - 'email', email - ))::text, '[]'::text) + 'email', email + )), '[]'::jsonb) FROM ( SELECT id, - 'email' AS user_type, - null AS username, - email AS email + 'email' AS user_type, + null AS username, + email AS email FROM email WHERE id IN ( SELECT aes.email_id FROM alert_email_subscription aes @@ -78,39 +79,39 @@ CREATE OR REPLACE VIEW v_alert_config AS ( UNION SELECT id, - 'profile' AS user_type, - username AS 
username, - email AS email + 'profile' AS user_type, + username AS username, + email AS email FROM profile WHERE id IN ( SELECT aps.profile_id FROM alert_profile_subscription aps WHERE aps.alert_config_id = ac.id ) ) all_emails - ) AS alert_email_subscriptions + ) alert_email_subscriptions FROM alert_config ac - INNER JOIN project prj ON ac.project_id = prj.id - INNER JOIN alert_type atype ON ac.alert_type_id = atype.id - LEFT JOIN profile prf1 ON ac.creator = prf1.id - LEFT JOIN profile prf2 ON ac.updater = prf2.id + INNER JOIN project prj ON ac.project_id = prj.id + INNER JOIN alert_type atype ON ac.alert_type_id = atype.id + LEFT JOIN profile prf1 ON ac.created_by = prf1.id + LEFT JOIN profile prf2 ON ac.updated_by = prf2.id WHERE NOT ac.deleted ); CREATE OR REPLACE VIEW v_submittal AS ( SELECT - sub.id AS id, - ac.id AS alert_config_id, - ac.name AS alert_config_name, - aty.id AS alert_type_id, - aty.name AS alert_type_name, - ac.project_id AS project_id, - sst.id AS submittal_status_id, - sst.name AS submittal_status_name, - sub.completion_date AS completion_date, - sub.create_date AS create_date, - sub.due_date AS due_date, - sub.marked_as_missing AS marked_as_missing, - sub.warning_sent AS warning_sent + sub.id, + ac.id alert_config_id, + ac.name alert_config_name, + aty.id alert_type_id, + aty.name alert_type_name, + ac.project_id, + sst.id submittal_status_id, + sst.name submittal_status_name, + sub.completed_at, + sub.created_at, + sub.due_at, + sub.marked_as_missing, + sub.warning_sent FROM submittal sub INNER JOIN alert_config ac ON sub.alert_config_id = ac.id INNER JOIN submittal_status sst ON sub.submittal_status_id = sst.id diff --git a/api/migrations/repeat/0090__views_plots.sql b/api/migrations/repeat/0090__views_plots.sql index 109df2aa..dc028b56 100644 --- a/api/migrations/repeat/0090__views_plots.sql +++ b/api/migrations/repeat/0090__views_plots.sql @@ -4,17 +4,17 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( pc.slug, pc.name, 
pc.project_id, - pc.creator, - pc.create_date, - pc.updater, - pc.update_date, - COALESCE(k.show_masked, 'true') AS show_masked, + pc.created_by, + pc.created_at, + pc.updated_by, + pc.updated_at, + COALESCE(k.show_masked, 'true') AS show_masked, COALESCE(k.show_nonvalidated, 'true') AS show_nonvalidated, - COALESCE(k.show_comments, 'true') AS show_comments, - COALESCE(k.auto_range, 'true') AS auto_range, - COALESCE(k.date_range, '1 year') AS date_range, - COALESCE(k.threshold, 3000) AS threshold, - COALESCE(rc.configs, '[]')::text AS report_configs, + COALESCE(k.show_comments, 'true') AS show_comments, + COALESCE(k.auto_range, 'true') AS auto_range, + COALESCE(k.date_range, '1 year') AS date_range, + COALESCE(k.threshold, 3000) AS threshold, + COALESCE(rc.configs, '[]'::jsonb) AS report_configs, pc.plot_type, CASE WHEN pc.plot_type = 'scatter-line' THEN json_build_object( @@ -24,11 +24,11 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( 'y2_axis_title', pcl.y2_axis_title, 'custom_shapes', COALESCE(cs.items, '[]') ) - )::text + ) WHEN pc.plot_type = 'profile' THEN json_build_object( 'instrument_id', ppc.instrument_id, 'instrument_type', it.name - )::text + ) WHEN pc.plot_type = 'contour' THEN json_build_object( 'timeseries_ids', COALESCE(pcct.timeseries_ids, '{}'), 'time', CASE @@ -39,11 +39,11 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( 'gradient_smoothing', pcc.gradient_smoothing, 'contour_smoothing', pcc.contour_smoothing, 'show_labels', pcc.show_labels - )::text + ) WHEN pc.plot_type = 'bullseye' THEN json_build_object( 'x_axis_timeseries_id', pbc.x_axis_timeseries_id, 'y_axis_timeseries_id', pbc.y_axis_timeseries_id - )::text + ) ELSE NULL END AS display FROM plot_configuration pc @@ -61,7 +61,7 @@ CREATE OR REPLACE VIEW v_plot_configuration AS ( ) k ON pc.id = k.id LEFT JOIN LATERAL ( SELECT - json_agg(json_build_object( + jsonb_agg(jsonb_build_object( 'id', id, 'slug', slug, 'name', name diff --git 
a/api/migrations/repeat/0100__views_datalogger.sql b/api/migrations/repeat/0100__views_datalogger.sql index 50f978e4..0593dd5f 100644 --- a/api/migrations/repeat/0100__views_datalogger.sql +++ b/api/migrations/repeat/0100__views_datalogger.sql @@ -1,28 +1,28 @@ CREATE OR REPLACE VIEW v_datalogger AS ( SELECT - dl.id AS id, - dl.sn AS sn, - dl.project_id AS project_id, - p1.id AS creator, - p1.username AS creator_username, - dl.create_date AS create_date, - p2.id AS updater, - p2.username AS updater_username, - dl.update_date AS update_date, - dl.name AS name, - dl.slug AS slug, - m.id AS model_id, - m.model AS model, - COALESCE(e.errors, '{}'::TEXT[]) AS errors, - COALESCE(t.tables, '[]'::JSON)::TEXT AS tables + dl.id, + dl.sn, + dl.project_id, + p1.id created_by, + p1.username created_by_username, + dl.created_at, + p2.id updated_by, + p2.username updated_by_username, + dl.updated_at, + dl.name, + dl.slug, + m.id model_id, + m.model, + COALESCE(e.errors, '{}')::text[] AS errors, + COALESCE(t.tables, '[]'::jsonb) AS tables FROM datalogger dl - INNER JOIN profile p1 ON dl.creator = p1.id - INNER JOIN profile p2 ON dl.updater = p2.id + INNER JOIN profile p1 ON dl.created_by = p1.id + INNER JOIN profile p2 ON dl.updated_by = p2.id INNER JOIN datalogger_model m ON dl.model_id = m.id LEFT JOIN ( SELECT de.datalogger_id, - ARRAY_AGG(de.error_message) AS errors + array_agg(de.error_message)::text[] AS errors FROM datalogger_error de INNER JOIN datalogger_table dt ON dt.id = de.datalogger_table_id WHERE dt.table_name = 'preparse' @@ -31,7 +31,7 @@ CREATE OR REPLACE VIEW v_datalogger AS ( LEFT JOIN ( SELECT dt.datalogger_id, - JSON_AGG(JSON_BUILD_OBJECT( + jsonb_agg(jsonb_build_object( 'id', dt.id, 'table_name', dt.table_name )) AS tables @@ -45,7 +45,7 @@ CREATE OR REPLACE VIEW v_datalogger_preview AS ( SELECT p.datalogger_table_id, p.preview, - p.update_date + p.updated_at FROM datalogger_preview p INNER JOIN datalogger_table dt ON dt.id = p.datalogger_table_id INNER JOIN 
datalogger dl ON dl.id = dt.datalogger_id @@ -57,7 +57,7 @@ CREATE OR REPLACE VIEW v_datalogger_equivalency_table AS ( dt.datalogger_id AS datalogger_id, dt.id AS datalogger_table_id, dt.table_name AS datalogger_table_name, - COALESCE(JSON_AGG(ROW_TO_JSON(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::JSON)::TEXT AS fields + COALESCE(jsonb_agg(to_jsonb(eq)) FILTER (WHERE eq.id IS NOT NULL), '[]'::jsonb) AS fields FROM datalogger_table dt INNER JOIN datalogger dl ON dt.datalogger_id = dl.id LEFT JOIN LATERAL ( @@ -72,11 +72,11 @@ CREATE OR REPLACE VIEW v_datalogger_equivalency_table AS ( CREATE OR REPLACE VIEW v_datalogger_hash AS ( SELECT dh.datalogger_id AS datalogger_id, - dh.hash AS "hash", - m.model AS model, - dl.sn AS sn + dh.hash AS "hash", + m.model AS model, + dl.sn AS sn FROM datalogger_hash dh - INNER JOIN datalogger dl ON dh.datalogger_id = dl.id + INNER JOIN datalogger dl ON dh.datalogger_id = dl.id INNER JOIN datalogger_model m ON dl.model_id = m.id WHERE NOT dl.deleted ); diff --git a/api/migrations/repeat/0110__views_evaluations.sql b/api/migrations/repeat/0110__views_evaluations.sql index ebffad49..34c1d1a0 100644 --- a/api/migrations/repeat/0110__views_evaluations.sql +++ b/api/migrations/repeat/0110__views_evaluations.sql @@ -1,37 +1,37 @@ CREATE OR REPLACE VIEW v_evaluation AS ( SELECT - ev.id AS id, - ev.name AS name, - ev.body AS body, - prf1.id AS creator, - COALESCE(prf1.username, 'midas') AS creator_username, - ev.create_date AS create_date, - prf2.id AS updater, - prf2.username AS updater_username, - ev.update_date AS update_date, - prj.id AS project_id, - prj.name AS project_name, - ac.id AS alert_config_id, - ac.name AS alert_config_name, - ev.submittal_id AS submittal_id, - ev.start_date AS start_date, - ev.end_date AS end_date, + ev.id, + ev.name, + ev.body, + prf1.id created_by, + COALESCE(prf1.username, 'midas') created_by_username, + ev.created_at, + prf2.id updated_by, + prf2.username updated_by_username, + ev.updated_at, + prj.id 
project_id, + prj.name project_name, + ac.id alert_config_id, + ac.name alert_config_name, + ev.submittal_id, + ev.started_at, + ev.ended_at, ( - SELECT COALESCE(JSON_AGG(JSON_BUILD_OBJECT( - 'instrument_id', id, + SELECT COALESCE(jsonb_agg(jsonb_build_object( + 'instrument_id', id, 'instrument_name', name - ))::text, '[]'::text) - FROM instrument - WHERE id = ANY( + )), '[]'::jsonb) + FROM instrument + WHERE id = ANY( SELECT evi.instrument_id - FROM evaluation_instrument evi - WHERE evi.evaluation_id = ev.id + FROM evaluation_instrument evi + WHERE evi.evaluation_id = ev.id ) - ) AS instruments + ) instruments FROM evaluation ev INNER JOIN project prj ON ev.project_id = prj.id - LEFT JOIN profile prf1 ON ev.creator = prf1.id - LEFT JOIN profile prf2 ON ev.updater = prf2.id + LEFT JOIN profile prf1 ON ev.created_by = prf1.id + LEFT JOIN profile prf2 ON ev.updated_by = prf2.id LEFT JOIN submittal sub ON sub.id = ev.submittal_id LEFT JOIN alert_config ac ON ac.id = sub.alert_config_id ); diff --git a/api/migrations/repeat/0120__views_alert_check.sql b/api/migrations/repeat/0120__views_alert_check.sql index b0362b25..999ee9b2 100644 --- a/api/migrations/repeat/0120__views_alert_check.sql +++ b/api/migrations/repeat/0120__views_alert_check.sql @@ -2,37 +2,38 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( SELECT ac.id AS alert_config_id, sub.id AS submittal_id, + null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date - ac.warning_interval - AND NOW() < sub.due_date - AND true = ANY(SELECT UNNEST(ARRAY_AGG(lm.time)) IS NULL), + AND sub.completed_at IS NULL + AND NOW() >= sub.due_at - ac.warning_interval + AND NOW() < sub.due_at + AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true - ) AS should_warn, + )::boolean AS should_warn, COALESCE( - sub.completion_date IS NULL + sub.completed_at IS NULL AND NOT sub.marked_as_missing - AND NOW() >= sub.due_date - AND true = 
ANY(SELECT UNNEST(ARRAY_AGG(lm.time)) IS NULL), + AND NOW() >= sub.due_at + AND true = ANY(SELECT UNNEST(array_agg(lm.time)) IS NULL), true - ) AS should_alert, + )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' - AND ac.last_reminded IS NOT NULL - AND sub.completion_date IS NULL + AND ac.last_reminded_at IS NOT NULL + AND sub.completed_at IS NULL AND NOT sub.marked_as_missing - AND NOW() >= sub.due_date + AND NOW() >= sub.due_at -- subtract 10 second constant to account for ticker accuracy/execution time - AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds', + AND NOW() >= ac.last_reminded_at + ac.remind_interval - INTERVAL '10 seconds', true - ) AS should_remind, - COALESCE(JSON_AGG(JSON_BUILD_OBJECT( + )::boolean AS should_remind, + COALESCE(json_agg(json_build_object( 'instrument_name', inst.name, 'timeseries_name', COALESCE(ts.name, 'No timeseries for instrument'), 'status', CASE - WHEN NOW() >= sub.due_date THEN 'missing' - WHEN NOW() < sub.due_date THEN 'warning' + WHEN NOW() >= sub.due_at THEN 'missing' + WHEN NOW() < sub.due_at THEN 'warning' ELSE 'N/A' END )) FILTER (WHERE lm.time IS NULL), '[]')::text AS affected_timeseries @@ -45,7 +46,7 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( LEFT JOIN LATERAL ( SELECT timeseries_id, - MAX(time) FILTER (WHERE time > sub.create_date AND time <= sub.due_date) AS time + MAX(time) FILTER (WHERE time > sub.created_at AND time <= sub.due_at) AS time FROM timeseries_measurement WHERE timeseries_id = ANY(SELECT id FROM timeseries WHERE instrument_id = inst.id) AND NOT timeseries_id = ANY(SELECT timeseries_id FROM instrument_constants) @@ -61,29 +62,30 @@ CREATE OR REPLACE VIEW v_alert_check_evaluation_submittal AS ( SELECT ac.id AS alert_config_id, sub.id AS submittal_id, + null AS submittal, COALESCE( ac.warning_interval != INTERVAL '0' - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date - ac.warning_interval - AND NOW() < sub.due_date, + AND 
sub.completed_at IS NULL + AND NOW() >= sub.due_at - ac.warning_interval + AND NOW() < sub.due_at, true - ) AS should_warn, + )::boolean AS should_warn, COALESCE( - sub.completion_date IS NULL - AND NOW() >= sub.due_date + sub.completed_at IS NULL + AND NOW() >= sub.due_at AND NOT sub.marked_as_missing, true - ) AS should_alert, + )::boolean AS should_alert, COALESCE( ac.remind_interval != INTERVAL '0' - AND ac.last_reminded IS NOT NULL - AND sub.completion_date IS NULL - AND NOW() >= sub.due_date + AND ac.last_reminded_at IS NOT NULL + AND sub.completed_at IS NULL + AND NOW() >= sub.due_at -- subtract 10 second constant to account for ticker accuracy/execution time - AND NOW() >= ac.last_reminded + ac.remind_interval - INTERVAL '10 seconds' + AND NOW() >= ac.last_reminded_at + ac.remind_interval - INTERVAL '10 seconds' AND NOT sub.marked_as_missing, true - ) AS should_remind + )::boolean AS should_remind FROM submittal sub INNER JOIN alert_config ac ON sub.alert_config_id = ac.id WHERE ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::UUID diff --git a/api/migrations/repeat/0130__views_district_rollup.sql b/api/migrations/repeat/0130__views_district_rollup.sql index 768e99f2..8fbe6a56 100644 --- a/api/migrations/repeat/0130__views_district_rollup.sql +++ b/api/migrations/repeat/0130__views_district_rollup.sql @@ -1,29 +1,29 @@ CREATE OR REPLACE VIEW v_district_rollup AS ( SELECT - ac.alert_type_id AS alert_type_id, - dt.office_id AS office_id, - dt.initials AS district_initials, - prj.name AS project_name, - prj.id AS project_id, - DATE_TRUNC('month', sub.due_date) AS the_month, - COUNT(sub.*) AS expected_total_submittals, - COUNT(sub.completion_date) FILTER ( - WHERE sub.completion_date IS NOT NULL - ) AS actual_total_submittals, + ac.alert_type_id, + dt.office_id, + dt.initials district_initials, + prj.name project_name, + prj.id project_id, + DATE_TRUNC('month', sub.due_at)::timestamptz "month", + COUNT(sub.*) expected_total_submittals, + 
COUNT(sub.completed_at) FILTER ( + WHERE sub.completed_at IS NOT NULL + ) actual_total_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b' - ) AS red_submittals, + ) red_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = 'ef9a3235-f6e2-4e6c-92f6-760684308f7f' - ) AS yellow_submittals, + ) yellow_submittals, COUNT(sub.*) FILTER ( WHERE sub.submittal_status_id = '0c0d6487-3f71-4121-8575-19514c7b9f03' - ) AS green_submittals + ) green_submittals FROM alert_config ac INNER JOIN project prj ON ac.project_id = prj.id LEFT JOIN district dt ON dt.id = prj.district_id LEFT JOIN submittal sub ON sub.alert_config_id = ac.id - WHERE sub.due_date <= NOW() - GROUP BY ac.alert_type_id, dt.office_id, dt.initials, prj.id, prj.name, DATE_TRUNC('month', sub.due_date) - ORDER BY DATE_TRUNC('month', sub.due_date), ac.alert_type_id + WHERE sub.due_at <= NOW() + GROUP BY ac.alert_type_id, dt.office_id, dt.initials, prj.id, prj.name, DATE_TRUNC('month', sub.due_at) + ORDER BY DATE_TRUNC('month', sub.due_at), ac.alert_type_id ); diff --git a/api/migrations/repeat/0140__views_depth_based_instruments.sql b/api/migrations/repeat/0140__views_depth_based_instruments.sql index 69ec54bd..2720b001 100644 --- a/api/migrations/repeat/0140__views_depth_based_instruments.sql +++ b/api/migrations/repeat/0140__views_depth_based_instruments.sql @@ -1,3 +1,9 @@ +-- TODO: some of this logic in the *_measurement views progressively got more complex, it might be better to +-- move these aggregate calculations to the business logic where less data would go over the wire +-- and would (hopefully) be less of a burden to maintain, +-- For later, look into using channels to aggregate multiple instruments simutaniously: +-- https://echo.labstack.com/docs/context#concurrency + CREATE OR REPLACE VIEW v_saa_segment AS ( SELECT seg.id, @@ -17,26 +23,27 @@ CREATE OR REPLACE VIEW v_saa_segment AS ( ) sub ON true ); + CREATE OR REPLACE VIEW 
v_saa_measurement AS ( SELECT r.instrument_id, r.time, - JSON_AGG(JSON_BUILD_OBJECT( - 'segment_id', r.segment_id, - 'x', r.x, - 'y', r.y, - 'z', r.z, - 'temp', r.t, - 'x_increment', r.x_increment, - 'y_increment', r.y_increment, - 'z_increment', r.z_increment, - 'temp_increment', r.temp_increment, - 'x_cum_dev', r.x_cum_dev, - 'y_cum_dev', r.y_cum_dev, - 'z_cum_dev', r.z_cum_dev, - 'temp_cum_dev', r.temp_cum_dev, - 'elevation', r.elevation - ) ORDER BY r.segment_id)::TEXT AS measurements + COALESCE(jsonb_agg(jsonb_build_object( + 'segment_id', r.segment_id, + 'x', r.x, + 'y', r.y, + 'z', r.z, + 'temp', r.t, + 'x_increment', r.x_increment, + 'y_increment', r.y_increment, + 'z_increment', r.z_increment, + 'temp_increment', r.temp_increment, + 'x_cum_dev', r.x_cum_dev, + 'y_cum_dev', r.y_cum_dev, + 'z_cum_dev', r.z_cum_dev, + 'temp_cum_dev', r.temp_cum_dev, + 'elevation', r.elevation + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements FROM (SELECT DISTINCT seg.instrument_id, seg.id AS segment_id, @@ -49,11 +56,11 @@ CREATE OR REPLACE VIEW v_saa_measurement AS ( q.initial_y - q.y y_increment, q.initial_z - q.z z_increment, q.initial_t - q.t temp_increment, - SUM(q.initial_x - q.x) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) x_cum_dev, - SUM(q.initial_y - q.y) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) y_cum_dev, - SUM(q.initial_z - q.z) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) z_cum_dev, - SUM(q.initial_t - q.t) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) temp_cum_dev, - SUM(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation + sum(q.initial_x - q.x) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) x_cum_dev, + sum(q.initial_y - q.y) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) y_cum_dev, + sum(q.initial_z - q.z) FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) z_cum_dev, + sum(q.initial_t - q.t) 
FILTER (WHERE q.time >= q.initial_time) OVER (ORDER BY seg.id ASC) temp_cum_dev, + sum(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation FROM saa_segment seg INNER JOIN saa_opts opts ON opts.instrument_id = seg.instrument_id LEFT JOIN LATERAL ( @@ -77,20 +84,21 @@ CREATE OR REPLACE VIEW v_saa_measurement AS ( SELECT time FROM timeseries_measurement WHERE time = opts.initial_time ) a LEFT JOIN LATERAL (SELECT time FROM timeseries_measurement WHERE time = opts.initial_time) ia ON true - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.x_timeseries_id) x ON x.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.y_timeseries_id) y ON y.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.z_timeseries_id) z ON z.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) t ON t.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.x_timeseries_id) ix ON ix.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.y_timeseries_id) iy ON iy.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.z_timeseries_id) iz ON iz.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) it ON it.time = ia.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = opts.bottom_elevation_timeseries_id) b ON b.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.length_timeseries_id) l ON l.time = a.time + LEFT JOIN timeseries_measurement x ON x.timeseries_id = seg.x_timeseries_id AND x.time = a.time + LEFT JOIN timeseries_measurement y ON y.timeseries_id = seg.y_timeseries_id AND y.time = a.time + LEFT JOIN 
timeseries_measurement z ON z.timeseries_id = seg.z_timeseries_id AND z.time = a.time + LEFT JOIN timeseries_measurement t ON t.timeseries_id = seg.temp_timeseries_id AND t.time = a.time + LEFT JOIN timeseries_measurement ix ON ix.timeseries_id = seg.x_timeseries_id AND ix.time = ia.time + LEFT JOIN timeseries_measurement iy ON iy.timeseries_id = seg.y_timeseries_id AND iy.time = ia.time + LEFT JOIN timeseries_measurement iz ON iz.timeseries_id = seg.z_timeseries_id AND iz.time = ia.time + LEFT JOIN timeseries_measurement it ON it.timeseries_id = seg.temp_timeseries_id AND it.time = ia.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time + LEFT JOIN timeseries_measurement l ON l.timeseries_id = seg.length_timeseries_id AND l.time = a.time ) q ON true) r GROUP BY r.instrument_id, r.time ); + CREATE OR REPLACE VIEW v_ipi_segment AS ( SELECT seg.id, @@ -108,18 +116,19 @@ CREATE OR REPLACE VIEW v_ipi_segment AS ( ) sub ON true ); + CREATE OR REPLACE VIEW v_ipi_measurement AS ( SELECT r.instrument_id, r.time, - JSON_AGG(JSON_BUILD_OBJECT( - 'segment_id', r.segment_id, - 'tilt', r.tilt, - 'inc_dev', r.inc_dev, - 'cum_dev', r.cum_dev, - 'temp', r.temp, - 'elevation', r.elevation - ) ORDER BY r.segment_id)::TEXT AS measurements + COALESCE(jsonb_agg(jsonb_build_object( + 'segment_id', r.segment_id, + 'tilt', r.tilt, + 'inc_dev', r.inc_dev, + 'cum_dev', r.cum_dev, + 'temp', r.temp, + 'elevation', r.elevation + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements FROM (SELECT DISTINCT seg.instrument_id, seg.id AS segment_id, @@ -127,9 +136,9 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( q.time, q.tilt, q.inc_dev, - COALESCE(q.cum_dev, SIN(q.tilt * PI() / 180) * q.seg_length) cum_dev, + COALESCE(q.cum_dev, sin(q.tilt * pi() / 180) * q.seg_length) cum_dev, q.temp, - SUM(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation + sum(q.bottom + q.seg_length) OVER (ORDER BY seg.id ASC) elevation FROM 
ipi_segment seg INNER JOIN ipi_opts opts ON opts.instrument_id = seg.instrument_id LEFT JOIN LATERAL ( @@ -137,7 +146,7 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( a.time, t.value AS tilt, d.value AS inc_dev, - SUM(d.value) OVER (ORDER BY seg.id ASC) AS cum_dev, + sum(d.value) OVER (ORDER BY seg.id ASC) AS cum_dev, temp.value AS temp, locf(b.value) OVER (ORDER BY a.time ASC) AS bottom, locf(l.value) OVER (ORDER BY a.time ASC) AS seg_length @@ -147,11 +156,88 @@ CREATE OR REPLACE VIEW v_ipi_measurement AS ( UNION SELECT time FROM timeseries_measurement WHERE time = opts.initial_time ) a - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.tilt_timeseries_id) t ON t.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.inc_dev_timeseries_id) d ON d.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.temp_timeseries_id) temp ON temp.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = opts.bottom_elevation_timeseries_id) b ON b.time = a.time - LEFT JOIN (SELECT time, value FROM timeseries_measurement WHERE timeseries_id = seg.length_timeseries_id) l ON l.time = a.time + LEFT JOIN timeseries_measurement t ON t.timeseries_id = seg.tilt_timeseries_id AND t.time = a.time + LEFT JOIN timeseries_measurement d ON d.timeseries_id = seg.inc_dev_timeseries_id AND d.time = a.time + LEFT JOIN timeseries_measurement temp ON temp.timeseries_id = seg.temp_timeseries_id AND temp.time = a.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time + LEFT JOIN timeseries_measurement l ON l.timeseries_id = seg.length_timeseries_id AND l.time = a.time + ) q ON true) r + GROUP BY r.instrument_id, r.time +); + + +CREATE OR REPLACE VIEW v_incl_segment AS ( + SELECT + seg.id, + seg.instrument_id, + seg.depth_timeseries_id, + seg.a0_timeseries_id, + 
seg.a180_timeseries_id, + seg.b0_timeseries_id, + seg.b180_timeseries_id + FROM incl_segment seg +); + + +CREATE OR REPLACE VIEW v_incl_measurement AS ( + SELECT + r.instrument_id, + r.time, + COALESCE(jsonb_agg(jsonb_build_object( + 'depth', r.depth, + 'a0', r.a0, + 'a180', r.a180, + 'b0', r.b0, + 'b180', r.b180, + 'a_checksum', r.a_checksum, + 'a_comb', r.a_comb, + 'a_increment', r.a_inc, + 'a_cum_dev', r.a_cum_dev, + 'b_checksum', r.b_checksum, + 'b_comb', r.b_comb, + 'b_increment', r.b_inc, + 'b_cum_dev', r.b_cum_dev + ) ORDER BY r.segment_id), '[]'::jsonb) AS measurements + FROM (SELECT DISTINCT + seg.instrument_id, + seg.id segment_id, + q.time, + q.depth, + q.a0, + q.a180, + q.b0, + q.b180, + (q.a0 + q.a180) a_checksum, + (q.a0 - q.a180) / 2 a_comb, + (q.a0 - q.a180) / 2 / NULLIF(q.bottom, 0) * 24 a_inc, + sum((q.a0 - q.a180) / 2 / NULLIF(q.bottom, 0) * 24) OVER (ORDER BY q.depth desc) a_cum_dev, + (q.b0 + q.b180) b_checksum, + (q.b0 - q.b180) / 2 b_comb, + (q.b0 - q.b180) / 2 / NULLIF(q.bottom, 0) * 24 b_inc, + sum((q.b0 - q.b180) / 2 / NULLIF(q.bottom, 0) * 24) OVER (ORDER BY q.depth desc) b_cum_dev + FROM incl_segment seg + INNER JOIN incl_opts opts ON opts.instrument_id = seg.instrument_id + LEFT JOIN LATERAL ( + SELECT + a.time, + d.value depth, + a0.value a0, + a180.value a180, + b0.value b0, + b180.value b180, + locf(b.value) OVER (ORDER BY a.time ASC) AS bottom + FROM ( + SELECT DISTINCT time FROM timeseries_measurement + WHERE timeseries_id IN (SELECT id FROM timeseries WHERE instrument_id = seg.instrument_id) + UNION + SELECT time FROM timeseries_measurement WHERE time = opts.initial_time + ) a + LEFT JOIN timeseries_measurement d ON d.timeseries_id = seg.depth_timeseries_id AND d.time = a.time + LEFT JOIN timeseries_measurement a0 ON a0.timeseries_id = seg.a0_timeseries_id AND a0.time = a.time + LEFT JOIN timeseries_measurement a180 ON a180.timeseries_id = seg.a180_timeseries_id AND a180.time = a.time + LEFT JOIN timeseries_measurement b0 ON 
b0.timeseries_id = seg.b0_timeseries_id AND b0.time = a.time + LEFT JOIN timeseries_measurement b180 ON b180.timeseries_id = seg.b180_timeseries_id AND b180.time = a.time + LEFT JOIN timeseries_measurement b ON b.timeseries_id = opts.bottom_elevation_timeseries_id AND b.time = a.time ) q ON true) r GROUP BY r.instrument_id, r.time ); diff --git a/api/migrations/repeat/0150__views_domain.sql b/api/migrations/repeat/0150__views_domain.sql index 807f5ca4..9fc1c889 100644 --- a/api/migrations/repeat/0150__views_domain.sql +++ b/api/migrations/repeat/0150__views_domain.sql @@ -60,11 +60,11 @@ CREATE OR REPLACE VIEW v_domain AS ( CREATE OR REPLACE VIEW v_domain_group AS ( SELECT "group", - json_agg(json_build_object( + jsonb_agg(jsonb_build_object( 'id', id, 'value', value, 'description', description - ))::text AS opts + )) AS opts FROM v_domain GROUP BY "group" ); diff --git a/api/migrations/repeat/0160__views_report_config.sql b/api/migrations/repeat/0160__views_report_config.sql index 95878191..f0ec56eb 100644 --- a/api/migrations/repeat/0160__views_report_config.sql +++ b/api/migrations/repeat/0160__views_report_config.sql @@ -7,34 +7,34 @@ CREATE OR REPLACE VIEW v_report_config AS ( rc.project_id, p.name AS project_name, dt.name AS district_name, - rc.creator, - cp.username AS creator_username, - rc.create_date, - rc.updater, - up.username AS updater_username, - rc.update_date, - COALESCE(pc.configs, '[]')::text AS plot_configs, - json_build_object( - 'date_range', json_build_object( + rc.created_by, + cp.username AS created_by_username, + rc.created_at, + rc.updated_by, + up.username AS updated_by_username, + rc.updated_at, + COALESCE(pc.configs, '[]'::jsonb) AS plot_configs, + jsonb_build_object( + 'date_range', jsonb_build_object( 'enabled', rc.date_range_enabled, 'value', rc.date_range ), - 'show_masked', json_build_object( + 'show_masked', jsonb_build_object( 'enabled', rc.show_masked_enabled, 'value', rc.show_masked ), - 'show_nonvalidated', json_build_object( 
+ 'show_nonvalidated', jsonb_build_object( 'enabled', rc.show_nonvalidated_enabled, 'value', rc.show_nonvalidated ) - )::text AS global_overrides + ) AS global_overrides FROM report_config rc INNER JOIN project p ON rc.project_id = p.id LEFT JOIN district dt ON p.district_id = dt.id - INNER JOIN profile cp ON cp.id = rc.creator - LEFT JOIN profile up ON up.id = rc.updater + INNER JOIN profile cp ON cp.id = rc.created_by + LEFT JOIN profile up ON up.id = rc.updated_by LEFT JOIN LATERAL ( - SELECT json_agg(json_build_object( + SELECT jsonb_agg(jsonb_build_object( 'id', pc.id, 'slug', pc.slug, 'name', pc.name diff --git a/api/migrations/repeat/0170__views_uploader.sql b/api/migrations/repeat/0170__views_uploader.sql new file mode 100644 index 00000000..647d79e9 --- /dev/null +++ b/api/migrations/repeat/0170__views_uploader.sql @@ -0,0 +1,28 @@ +CREATE VIEW v_uploader_config AS ( + SELECT + u.id, + u.project_id, + u.slug, + u.name, + u.description, + u.created_at, + u.created_by, + pc.username created_by_username, + u.updated_by, + u.updated_at, + pu.username updated_by_username, + u.type, + u.tz_name, + u.time_field, + u.validated_field_enabled, + u.validated_field, + u.masked_field_enabled, + u.masked_field, + u.comment_field_enabled, + u.comment_field, + u.column_offset, + u.row_offset + FROM uploader_config u + INNER JOIN profile pc ON u.created_by = pc.id + LEFT JOIN profile pu ON u.updated_by = pu.id +); diff --git a/api/migrations/schema/V1.14.00__uploader.sql b/api/migrations/schema/V1.14.00__uploader.sql new file mode 100644 index 00000000..25aeafee --- /dev/null +++ b/api/migrations/schema/V1.14.00__uploader.sql @@ -0,0 +1,32 @@ +-- checked db and there are no null instrument geometries +ALTER TABLE instrument ALTER COLUMN "geometry" TYPE geometry(Point, 4326); +ALTER TABLE instrument ALTER COLUMN "geometry" SET NOT NULL; + + +CREATE TYPE uploader_config_type AS ENUM ('csv', 'dux', 'toa5'); + + +CREATE TABLE uploader_config ( + id uuid PRIMARY KEY DEFAULT 
uuid_generate_v4(), + project_id uuid NOT NULL REFERENCES project(id), + slug text UNIQUE NOT NULL, + name text NOT NULL, + description text NOT NULL, + create_date timestamptz NOT NULL DEFAULT now(), + creator uuid NOT NULL REFERENCES profile(id), + update_date timestamptz, + updater uuid REFERENCES profile(id), + type uploader_config_type NOT NULL, + tz_name text NOT NULL DEFAULT 'UTC' +); + + +CREATE TABLE uploader_config_mapping ( + uploader_config_id uuid NOT NULL REFERENCES uploader_config(id) ON DELETE CASCADE, + field_name text NOT NULL, + timeseries_id uuid REFERENCES timeseries(id) ON DELETE SET NULL, + CONSTRAINT uploader_config_mapping_uploader_config_id_field_name UNIQUE(uploader_config_id, field_name) +); + +ALTER TABLE collection_group ADD COLUMN sort_order integer NOT NULL DEFAULT 0; +ALTER TABLE collection_group_timeseries ADD COLUMN sort_order integer NOT NULL DEFAULT 0; diff --git a/api/migrations/schema/V1.15.00__incl_opts.sql b/api/migrations/schema/V1.15.00__incl_opts.sql new file mode 100644 index 00000000..e5669d77 --- /dev/null +++ b/api/migrations/schema/V1.15.00__incl_opts.sql @@ -0,0 +1,17 @@ +CREATE TABLE incl_opts ( + instrument_id uuid NOT NULL REFERENCES instrument(id) ON DELETE CASCADE, + num_segments int NOT NULL, + bottom_elevation_timeseries_id uuid REFERENCES timeseries(id), + initial_time timestamptz +); + +CREATE TABLE incl_segment ( + instrument_id uuid NOT NULL REFERENCES instrument(id) ON DELETE CASCADE, + id int NOT NULL, + depth_timeseries_id uuid REFERENCES timeseries(id), + a0_timeseries_id uuid REFERENCES timeseries(id), + a180_timeseries_id uuid REFERENCES timeseries(id), + b0_timeseries_id uuid REFERENCES timeseries(id), + b180_timeseries_id uuid REFERENCES timeseries(id), + PRIMARY KEY (instrument_id, id) +); diff --git a/api/migrations/schema/V1.16.00__uploader_config.sql b/api/migrations/schema/V1.16.00__uploader_config.sql new file mode 100644 index 00000000..f9da6073 --- /dev/null +++ 
b/api/migrations/schema/V1.16.00__uploader_config.sql @@ -0,0 +1,8 @@ +ALTER TABLE uploader_config +ADD COLUMN time_field text NOT NULL, +ADD COLUMN validated_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN validated_field text, +ADD COLUMN masked_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN masked_field text, +ADD COLUMN comment_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN comment_field text; diff --git a/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql b/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql new file mode 100644 index 00000000..831e184d --- /dev/null +++ b/api/migrations/schema/V1.17.00__evaluation_del_cascade.sql @@ -0,0 +1,8 @@ +ALTER TABLE evaluation_instrument +DROP CONSTRAINT evaluation_instrument_evaluation_id_fkey; + +ALTER TABLE evaluation_instrument +ADD CONSTRAINT evaluation_instrument_evaluation_id_fkey +FOREIGN KEY (evaluation_id) +REFERENCES evaluation(id) +ON DELETE CASCADE; diff --git a/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql b/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql new file mode 100644 index 00000000..da270224 --- /dev/null +++ b/api/migrations/schema/V1.18.00__timeseries_type_not_null.sql @@ -0,0 +1,2 @@ +UPDATE timeseries SET type='standard' WHERE type IS NULL; +ALTER TABLE timeseries ALTER COLUMN type SET NOT NULL; diff --git a/api/migrations/schema/V1.19.00__audit_into_rename.sql b/api/migrations/schema/V1.19.00__audit_into_rename.sql new file mode 100644 index 00000000..2671b1f7 --- /dev/null +++ b/api/migrations/schema/V1.19.00__audit_into_rename.sql @@ -0,0 +1,74 @@ +ALTER TABLE collection_group RENAME creator TO created_by; +ALTER TABLE collection_group RENAME create_date TO created_at; +ALTER TABLE collection_group RENAME updater TO updated_by; +ALTER TABLE collection_group RENAME update_date TO updated_at; + +ALTER TABLE report_config RENAME creator TO created_by; +ALTER TABLE report_config RENAME create_date TO created_at; 
+ALTER TABLE report_config RENAME updater TO updated_by; +ALTER TABLE report_config RENAME update_date TO updated_at; + +ALTER TABLE report_download_job RENAME creator TO created_by; +ALTER TABLE report_download_job RENAME create_date TO created_at; +ALTER TABLE report_download_job RENAME progress_update_date TO progress_updated_at; + +ALTER TABLE datalogger RENAME creator TO created_by; +ALTER TABLE datalogger RENAME create_date TO created_at; +ALTER TABLE datalogger RENAME updater TO updated_by; +ALTER TABLE datalogger RENAME update_date TO updated_at; + +ALTER TABLE project RENAME creator TO created_by; +ALTER TABLE project RENAME create_date TO created_at; +ALTER TABLE project RENAME updater TO updated_by; +ALTER TABLE project RENAME update_date TO updated_at; + +ALTER TABLE instrument_group RENAME creator TO created_by; +ALTER TABLE instrument_group RENAME create_date TO created_at; +ALTER TABLE instrument_group RENAME updater TO updated_by; +ALTER TABLE instrument_group RENAME update_date TO updated_at; + +ALTER TABLE instrument RENAME creator TO created_by; +ALTER TABLE instrument RENAME create_date TO created_at; +ALTER TABLE instrument RENAME updater TO updated_by; +ALTER TABLE instrument RENAME update_date TO updated_at; + +ALTER TABLE alert RENAME create_date TO created_at; + +ALTER TABLE alert_config RENAME creator TO created_by; +ALTER TABLE alert_config RENAME create_date TO created_at; +ALTER TABLE alert_config RENAME updater TO updated_by; +ALTER TABLE alert_config RENAME update_date TO updated_at; +ALTER TABLE alert_config RENAME last_checked TO last_checked_at; +ALTER TABLE alert_config RENAME last_reminded TO last_reminded_at; +ALTER TABLE alert_config RENAME start_date TO started_at; + +ALTER TABLE instrument_note RENAME creator TO created_by; +ALTER TABLE instrument_note RENAME create_date TO created_at; +ALTER TABLE instrument_note RENAME updater TO updated_by; +ALTER TABLE instrument_note RENAME update_date TO updated_at; + +ALTER TABLE 
plot_configuration RENAME creator TO created_by; +ALTER TABLE plot_configuration RENAME create_date TO created_at; +ALTER TABLE plot_configuration RENAME updater TO updated_by; +ALTER TABLE plot_configuration RENAME update_date TO updated_at; + +ALTER TABLE uploader_config RENAME creator TO created_by; +ALTER TABLE uploader_config RENAME create_date TO created_at; +ALTER TABLE uploader_config RENAME updater TO updated_by; +ALTER TABLE uploader_config RENAME update_date TO updated_at; + +ALTER TABLE evaluation RENAME creator TO created_by; +ALTER TABLE evaluation RENAME create_date TO created_at; +ALTER TABLE evaluation RENAME updater TO updated_by; +ALTER TABLE evaluation RENAME update_date TO updated_at; +ALTER TABLE evaluation RENAME start_date TO started_at; +ALTER TABLE evaluation RENAME end_date TO ended_at; + +ALTER TABLE submittal RENAME completion_date TO completed_at; +ALTER TABLE submittal RENAME due_date TO due_at; +ALTER TABLE submittal RENAME create_date TO created_at; + +ALTER TABLE datalogger_preview RENAME update_date TO updated_at; + +ALTER TABLE datalogger ALTER COLUMN updated_by DROP NOT NULL; +ALTER TABLE datalogger ALTER COLUMN updated_at DROP NOT NULL; diff --git a/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql b/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql new file mode 100644 index 00000000..3d4ed59c --- /dev/null +++ b/api/migrations/schema/V1.20.00__fix_missing_profile_references.sql @@ -0,0 +1,107 @@ +-- To clean up tables where the create/update profiles are null, +-- we can create a user with a zero-value UUID, then update +-- those columns to reference this profile + +INSERT INTO profile (id, edipi, username, display_name, email, is_admin) +VALUES ('00000000-0000-0000-0000-000000000000', 0, 'MIDAS', 'MIDAS', 'MIDAS', false); + +UPDATE project +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != 
'00000000-0000-0000-0000-000000000000'::uuid); +UPDATE datalogger +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument_group +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE alert_config +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE instrument_note +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE collection_group +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE plot_configuration +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); +UPDATE evaluation +SET created_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT created_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid); + +UPDATE project +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE datalogger +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != 
'00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument_group +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE alert_config +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE instrument_note +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE collection_group +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE plot_configuration +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; +UPDATE evaluation +SET updated_by = '00000000-0000-0000-0000-000000000000'::uuid +WHERE NOT updated_by = any(SELECT id FROM profile WHERE id != '00000000-0000-0000-0000-000000000000'::uuid) +AND updated_by IS NOT NULL; + +ALTER TABLE project +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE datalogger +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) 
REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument_group +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE alert_config +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE instrument_note +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE collection_group +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE plot_configuration +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; + +ALTER TABLE evaluation +ADD FOREIGN KEY (created_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET DEFAULT, +ADD FOREIGN KEY (updated_by) REFERENCES profile(id) ON UPDATE CASCADE ON DELETE SET NULL; diff --git a/api/migrations/schema/V1.21.00__uploader_config_offset.sql b/api/migrations/schema/V1.21.00__uploader_config_offset.sql new file mode 100644 index 00000000..6c7c3f4e --- /dev/null +++ b/api/migrations/schema/V1.21.00__uploader_config_offset.sql @@ -0,0 +1,3 @@ +ALTER TABLE uploader_config +ADD COLUMN column_offset integer not null default 0, +ADD COLUMN row_offset integer not null default 0; diff --git 
a/api/queries/alert.sql b/api/queries/alert.sql new file mode 100644 index 00000000..f1ceab8e --- /dev/null +++ b/api/queries/alert.sql @@ -0,0 +1,48 @@ +-- name: AlertCreate :exec +insert into alert (alert_config_id) values ($1); + + +-- name: AlertCreateBatch :batchexec +insert into alert (alert_config_id) values ($1); + + +-- name: AlertListForProject :many +select * from v_alert where project_id = $1; + + +-- name: AlertListForInstrument :many +select * from v_alert +where alert_config_id = any( + select id from alert_config_instrument + where instrument_id = $1 +); + + +-- name: AlertListForProfile :many +select a.*, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1; + + +-- name: AlertGet :one +select a.*, + case when r.alert_id is not null then true else false + end as read +from v_alert a +left join alert_read r on r.alert_id = a.id +inner join alert_profile_subscription aps on a.alert_config_id = aps.alert_config_id +where aps.profile_id = $1 +and a.id = $2; + + +-- name: AlertReadCreate :exec +insert into alert_read (profile_id, alert_id) values ($1, $2) +on conflict do nothing; + + +-- name: AlertReadDelete :exec +delete from alert_read where profile_id = $1 and alert_id = $2; diff --git a/api/queries/alert_check.sql b/api/queries/alert_check.sql new file mode 100644 index 00000000..ee6e5ec1 --- /dev/null +++ b/api/queries/alert_check.sql @@ -0,0 +1,31 @@ +-- name: AlertConfigListUpdateLastCheckedAt :many +update alert_config ac1 +set last_checked_at = now() +from ( + select * + from v_alert_config +) ac2 +where ac1.id = ac2.id +returning ac2.*; + + +-- name: AlertConfigUpdateLastRemindedAt :exec +update alert_config set last_reminded_at = $2 where id = $1; + + +-- name: SubmittalUpdateCompletionDateOrWarningSent :exec +update submittal set + submittal_status_id = 
$2, + completed_at = $3, + warning_sent = $4 +where id = $1; + + +-- name: SubmittalCreateNextFromNewAlertConfigDate :exec +insert into submittal (alert_config_id, created_at, due_at) +select + ac.id, + sqlc.arg(date)::timestamptz, + sqlc.arg(date)::timestamptz + ac.schedule_interval +from alert_config ac +where ac.id = $1; diff --git a/api/queries/alert_config.sql b/api/queries/alert_config.sql new file mode 100644 index 00000000..35d47cd5 --- /dev/null +++ b/api/queries/alert_config.sql @@ -0,0 +1,94 @@ +-- name: AlertConfigListForProject :many +select * +from v_alert_config +where project_id = $1 +order by name; + + +-- name: AlertConfigListForProjectAlertType :many +select * +from v_alert_config +where project_id = $1 +and alert_type_id = $2 +order by name; + + +-- name: AlertConfigListForInstrument :many +select t.* +from v_alert_config t +inner join alert_config_instrument aci on t.id = aci.alert_config_id +where aci.instrument_id = $1 +order by t.name; + + +-- name: AlertConfigGet :one +select * from v_alert_config where id = $1; + + +-- name: AlertConfigCreate :one +insert into alert_config ( + project_id, + name, + body, + alert_type_id, + started_at, + schedule_interval, + mute_consecutive_alerts, + remind_interval, + warning_interval, + created_by, + created_at +) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11) +returning id; + + +-- name: AlertConfigInstrumentCreateAssignment :exec +insert into alert_config_instrument (alert_config_id, instrument_id) values ($1, $2); + + +-- name: AlertConfigInstrumentDeleteAssignmentsForAlertConfig :exec +delete from alert_config_instrument where alert_config_id = $1; + + +-- name: SubmittalCreateNextFromExistingAlertConfigDate :exec +insert into submittal (alert_config_id, due_at) +select ac.id, ac.created_at + ac.schedule_interval +from alert_config ac +where ac.id = $1; + + +-- name: AlertConfigUpdate :exec +update alert_config set + name = $3, + body = $4, + started_at = $5, + schedule_interval = $6, + 
mute_consecutive_alerts = $7, + remind_interval = $8, + warning_interval = $9, + updated_by = $10, + updated_at = $11 +where id = $1 and project_id = $2; + + +-- name: SubmittalUpdateNextForAlertConfig :one +update submittal +set due_at = sq.new_due_at +from ( + select + sub.id as submittal_id, + sub.created_at + ac.schedule_interval as new_due_at + from submittal sub + inner join alert_config ac on sub.alert_config_id = ac.id + where sub.alert_config_id = $1 + and sub.due_at > now() + and sub.completed_at is null + and not sub.marked_as_missing +) sq +where id = sq.submittal_id +and sq.new_due_at > now() +returning id; + + +-- name: AlertConfigDelete :exec +update alert_config set deleted=true where id = $1; diff --git a/api/queries/alert_measurement_check.sql b/api/queries/alert_measurement_check.sql new file mode 100644 index 00000000..b4dbc803 --- /dev/null +++ b/api/queries/alert_measurement_check.sql @@ -0,0 +1,14 @@ +-- name: SubmittalListIncompleteEvaluation :many +select * from v_alert_check_evaluation_submittal +where submittal_id = any( + select id from submittal + where completed_at is null and not marked_as_missing +); + + +-- name: SubmittalListIncompleteMeasurement :many +select * from v_alert_check_measurement_submittal +where submittal_id = any( + select id from submittal + where completed_at is null and not marked_as_missing +); diff --git a/api/queries/alert_subscription.sql b/api/queries/alert_subscription.sql new file mode 100644 index 00000000..150b6e3e --- /dev/null +++ b/api/queries/alert_subscription.sql @@ -0,0 +1,62 @@ +-- name: AlertProfileSubscriptionCreateOnAnyConflictDoNothing :exec +insert into alert_profile_subscription (alert_config_id, profile_id) +values ($1, $2) +on conflict do nothing; + + +-- name: AlertProfileSubscriptionDelete :exec +delete from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; + + +-- name: AlertSubscriptionGet :one +select * from alert_profile_subscription where id = $1; + + +-- 
name: AlertSubscriptionGetForAlertConfigProfile :one +select * from alert_profile_subscription where alert_config_id = $1 and profile_id = $2; + + +-- name: AlertSubscriptionListForProfile :many +select * from alert_profile_subscription where profile_id = $1; + + +-- name: AlertSubscriptionUpdateForProfile :exec +update alert_profile_subscription set mute_ui=$1, mute_notify=$2 where alert_config_id=$3 and profile_id=$4; + + +-- name: EmailGetOrCreate :one +with e as ( + insert into email (email) values ($1) + on conflict on constraint unique_email do nothing + returning id +) +select id from e +union +select id from email where email = $1 +limit 1; + + +-- name: EmailDelete :exec +delete from email where id = $1; + + +-- name: AlertEmailSubscriptionCreate :exec +insert into alert_email_subscription (alert_config_id, email_id) values ($1,$2) +on conflict on constraint email_unique_alert_config do nothing; + + +-- name: AlertProfileSubscriptionCreate :exec +insert into alert_profile_subscription (alert_config_id, profile_id) values ($1,$2) +on conflict on constraint profile_unique_alert_config do nothing; + + +-- name: AlertEmailSubscriptionDelete :exec +delete from alert_email_subscription where alert_config_id = $1 and email_id = $2; + + +-- name: AlertEmailSubscritpionDeleteForAlertConfig :exec +delete from alert_email_subscription where alert_config_id = $1; + + +-- name: AlertProfileSubscritpionDeleteForAlertConfig :exec +delete from alert_profile_subscription where alert_config_id = $1; diff --git a/api/queries/autocomplete.sql b/api/queries/autocomplete.sql new file mode 100644 index 00000000..b71be996 --- /dev/null +++ b/api/queries/autocomplete.sql @@ -0,0 +1,5 @@ +-- name: EmailAutocompleteList :many +select id, user_type, username, email +from v_email_autocomplete +where username_email ilike '%'||sqlc.arg(search_keyword)||'%' +limit sqlc.arg(result_limit); diff --git a/api/queries/aware.sql b/api/queries/aware.sql new file mode 100644 index 
00000000..75659c8c --- /dev/null +++ b/api/queries/aware.sql @@ -0,0 +1,16 @@ +-- name: AwareParameterList :many +select id, key, parameter_id, unit_id from aware_parameter; + + +-- name: AwarePlatformParameterListEnabled :many +select instrument_id, aware_id, aware_parameter_key, timeseries_id +from v_aware_platform_parameter_enabled +order by aware_id, aware_parameter_key; + + +-- name: AwarePlatformCreate :exec +insert into aware_platform (instrument_id, aware_id) values ($1, $2); + + +-- name: AwarePlatformCreateBatch :batchexec +insert into aware_platform (instrument_id, aware_id) values ($1, $2); diff --git a/api/queries/collection_group.sql b/api/queries/collection_group.sql new file mode 100644 index 00000000..24cc4d78 --- /dev/null +++ b/api/queries/collection_group.sql @@ -0,0 +1,36 @@ +-- name: CollectionGroupListForProject :many +select * from collection_group where project_id = $1; + + +-- name: CollectionGroupDetailsGet :one +select * from v_collection_group_details where id = $1; + + +-- name: CollectionGroupCreate :one +insert into collection_group (project_id, name, slug, created_by, created_at, sort_order) +values (sqlc.arg(project_id), sqlc.arg(name)::varchar, slugify(sqlc.arg(name)::varchar, 'collection_group'), sqlc.arg(created_by), sqlc.arg(created_at), sqlc.arg(sort_order)) +returning *; + + +-- name: CollectionGroupUpdate :one +update collection_group set name=$3, updated_by=$4, updated_at=$5, sort_order=$6 +where project_id=$1 and id=$2 +returning *; + + +-- name: CollectionGroupDelete :exec +delete from collection_group where project_id=$1 and id=$2; + + +-- name: CollectionGroupTimeseriesCreate :exec +insert into collection_group_timeseries (collection_group_id, timeseries_id, sort_order) values ($1, $2, $3) +on conflict on constraint collection_group_unique_timeseries do nothing; + + +-- name: CollectionGroupTimeseriesUpdateSortOrder :exec +update collection_group_timeseries set sort_order=$3 +where collection_group_id=$1 and 
timeseries_id=$2; + + +-- name: CollectionGroupTimeseriesDelete :exec +delete from collection_group_timeseries where collection_group_id=$1 and timeseries_id = $2; diff --git a/api/queries/datalogger.sql b/api/queries/datalogger.sql new file mode 100644 index 00000000..00fb8cff --- /dev/null +++ b/api/queries/datalogger.sql @@ -0,0 +1,84 @@ +-- name: DataloggerGetModelName :one +select model from datalogger_model where id=$1; + + +-- name: DataloggerListForProject :many +select * from v_datalogger where project_id=$1; + + +-- name: DataloggerList :many +select * from v_datalogger; + + +-- name: DataloggerGetActive :one +select exists (select true from v_datalogger where model=$1 and sn=$2); + + +-- name: DataloggerGetExists :one +select true from v_datalogger where id=$1; + + +-- name: DataloggerHashCreate :exec +insert into datalogger_hash (datalogger_id, "hash") values ($1, $2); + + +-- name: DataloggerGet :one +select * from v_datalogger where id=$1; + + +-- name: DataloggerCreate :one +insert into datalogger (name, sn, project_id, created_by, updated_by, slug, model_id) +values ($1, $2, $3, $4, $4, slugify($1, 'datalogger'), $5) +returning id; + + +-- name: DataloggerUpdate :exec +update datalogger set + name=$2, + updated_by=$3, + updated_at=$4 +where id=$1; + + +-- name: DataloggerHashUpdate :exec +update datalogger_hash set "hash"=$2 where datalogger_id=$1; + + +-- name: DataloggerUpdateAuditInfo :exec +update datalogger set updated_by=$2, updated_at=$3 where id=$1; + + +-- name: DataloggerDelete :exec +update datalogger set deleted=true, updated_by=$2, updated_at=$3 where id=$1; + + +-- name: DataloggerTablePreviewGet :one +select * from v_datalogger_preview where datalogger_table_id=$1 limit 1; + + +-- name: DataloggerUpdateTableNameBlank :exec +update datalogger_table set table_name='' where id=$1; + + +-- name: DataloggerTableUpdateNameIfEmpty :exec +update datalogger_table dt +set table_name=$2 +where dt.table_name='' and dt.datalogger_id=$1 +and not 
exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2 +); + + +-- name: DataloggerTableGetOrCreate :one +with new_datalogger_table as ( + insert into datalogger_table (datalogger_id, table_name) values ($1, $2) + on conflict on constraint datalogger_table_datalogger_id_table_name_key do nothing + returning id +) +select ndt.id from new_datalogger_table ndt +union +select sdt.id from datalogger_table sdt where sdt.datalogger_id=$1 and sdt.table_name=$2; + + +-- name: DataloggerTableDelete :exec +delete from datalogger_table where id=$1; diff --git a/api/queries/datalogger_telemetry.sql b/api/queries/datalogger_telemetry.sql new file mode 100644 index 00000000..837395ff --- /dev/null +++ b/api/queries/datalogger_telemetry.sql @@ -0,0 +1,42 @@ +-- name: DataloggerGetForModelSn :one +select * from v_datalogger +where model = $1 and sn = $2 +limit 1; + + +-- name: DataloggerHashGetForModelSn :one +select "hash" from v_datalogger_hash +where model = $1 and sn = $2 +limit 1; + + +-- name: DataloggerTablePreviewCreate :exec +insert into datalogger_preview (datalogger_table_id, preview, updated_at) values ($1, $2, $3); + + +-- name: DataloggerTablePreviewUpdate :exec +update datalogger_preview set preview = $3, updated_at = $4 +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); + + +-- name: DataloggerErrorDelete :exec +delete from datalogger_error +where datalogger_table_id = any(select dt.id from datalogger_table dt where dt.datalogger_id = $1 and dt.table_name = $2); + + +-- name: DataloggerErrorCreate :exec +insert into datalogger_error (datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +); + + +-- name: DataloggerErrorCreateBatch :batchexec +insert into datalogger_error 
(datalogger_table_id, error_message) +select dt.id, $3 from datalogger_table dt +where dt.datalogger_id = $1 and dt.table_name = $2 +and not exists ( + select 1 from datalogger_table sdt where sdt.datalogger_id = $1 and sdt.table_name = $2 +); diff --git a/api/queries/district_rollup.sql b/api/queries/district_rollup.sql new file mode 100644 index 00000000..6bb93396 --- /dev/null +++ b/api/queries/district_rollup.sql @@ -0,0 +1,14 @@ +-- name: DistrictRollupListEvaluationForProjectAlertConfig :many +select * from v_district_rollup +where alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +and project_id=sqlc.arg(project_id) +and "month" >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and "month" <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); + + +-- name: DistrictRollupListMeasurementForProjectAlertConfig :many +select * from v_district_rollup +where alert_type_id = '97e7a25c-d5c7-4ded-b272-1bb6e5914fe3'::uuid +and project_id=sqlc.arg(project_id) +and "month" >= date_trunc('month', sqlc.arg(start_month_time)::timestamptz) +and "month" <= date_trunc('month', sqlc.arg(end_month_time)::timestamptz); diff --git a/api/queries/domain.sql b/api/queries/domain.sql new file mode 100644 index 00000000..3a3f781e --- /dev/null +++ b/api/queries/domain.sql @@ -0,0 +1,10 @@ +-- name: PgTimezoneNamesList :many +select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names; + + +-- name: DomainList :many +select * from v_domain; + + +-- name: DomainGroupList :many +select * from v_domain_group; diff --git a/api/queries/equivalency_table.sql b/api/queries/equivalency_table.sql new file mode 100644 index 00000000..e1a8d0cd --- /dev/null +++ b/api/queries/equivalency_table.sql @@ -0,0 +1,47 @@ +-- name: DataloggerTableGetIsValid :one +select not exists (select * from datalogger_table where id = $1 and table_name = 'preparse'); + + +-- name: EquivalencyTableTimeseriesGetIsValid :one +select not exists ( + select id from 
v_timeseries_computed + where id = $1 + union all + select timeseries_id from instrument_constants + where timeseries_id = $1 +); + + +-- name: EquivalencyTableGet :one +select + datalogger_id, + datalogger_table_id, + datalogger_table_name, + fields +from v_datalogger_equivalency_table +where datalogger_table_id = $1; + + +-- name: EquivalencyTableCreateOrUpdate :exec +insert into datalogger_equivalency_table +(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) +values ($1, $2, $3, $4, $5, $6) +on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key +do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id; + + +-- name: EquivalencyTableUpdate :exec +update datalogger_equivalency_table set + field_name = $2, + display_name = $3, + instrument_id = $4, + timeseries_id = $5 +where id = $1; + + +-- name: EquivalencyTableDeleteForDataloggerTable :exec +delete from datalogger_equivalency_table where datalogger_table_id = $1; + + +-- name: EquivalencyTableDelete :exec +delete from datalogger_equivalency_table where id = $1; diff --git a/api/queries/evaluation.sql b/api/queries/evaluation.sql new file mode 100644 index 00000000..cb4d8cb4 --- /dev/null +++ b/api/queries/evaluation.sql @@ -0,0 +1,97 @@ +-- name: EvaluationListForProject :many +select * +from v_evaluation +where project_id=$1; + + +-- name: EvaluationListForProjectAlertConfig :many +select * from v_evaluation +where project_id=$1 +and alert_config_id is not null +and alert_config_id=$2; + + +-- name: EvaluationListForInstrument :many +select * from v_evaluation +where id = any( + select evaluation_id + from evaluation_instrument + where instrument_id=$1 +); + + +-- name: EvaluationGet :one +select * from v_evaluation where id=$1; + + +-- name: SubmittalUpdateCompleteEvaluation :one +update submittal sub1 set + submittal_status_id = sq.submittal_status_id, + 
completed_at = now() +from ( + select + sub2.id as submittal_id, + case + -- if completed before due date, mark submittal as green id + when now() <= sub2.due_at then '0c0d6487-3f71-4121-8575-19514c7b9f03'::uuid + -- if completed after due date, mark as yellow + else 'ef9a3235-f6e2-4e6c-92f6-760684308f7f'::uuid + end as submittal_status_id + from submittal sub2 + inner join alert_config ac on sub2.alert_config_id = ac.id + where sub2.id=$1 + and sub2.completed_at is null + and not sub2.marked_as_missing + and ac.alert_type_id = 'da6ee89e-58cc-4d85-8384-43c3c33a68bd'::uuid +) sq +where sub1.id = sq.submittal_id +returning sub1.*; + + +-- name: SubmittalCreateNextEvaluation :exec +insert into submittal (alert_config_id, due_at) +select + ac.id, + now() + ac.schedule_interval +from alert_config ac +where ac.id = any(select sub.alert_config_id from submittal sub where sub.id=$1); + + +-- name: EvaluationCreate :one +insert into evaluation ( + project_id, + submittal_id, + name, + body, + started_at, + ended_at, + created_by, + created_at +) values ($1,$2,$3,$4,$5,$6,$7,$8) +returning id; + + +-- name: EvaluationInstrumentCreate :exec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); + + +-- name: EvaluationInstrumentCreateBatch :batchexec +insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2); + + +-- name: EvaluationUpdate :exec +update evaluation set + name=$3, + body=$4, + started_at=$5, + ended_at=$6, + updated_by=$7, + updated_at=$8 +where id=$1 and project_id=$2; + + +-- name: EvaluationInstrumentDeleteForEvaluation :exec +delete from evaluation_instrument where evaluation_id=$1; + +-- name: EvaluationDelete :exec +delete from evaluation where id=$1; diff --git a/api/queries/heartbeat.sql b/api/queries/heartbeat.sql new file mode 100644 index 00000000..20890d7a --- /dev/null +++ b/api/queries/heartbeat.sql @@ -0,0 +1,12 @@ +-- name: HeartbeatCreate :one +insert into heartbeat (time) values ($1) returning 
*; + + +-- name: HeartbeatGetLatest :one +select max(time)::timestamptz from heartbeat; + + +-- name: HeartbeatList :many +select * from heartbeat +order by time desc +limit sqlc.arg(result_limit); diff --git a/api/queries/home.sql b/api/queries/home.sql new file mode 100644 index 00000000..0a03db7b --- /dev/null +++ b/api/queries/home.sql @@ -0,0 +1,7 @@ +-- name: HomeGet :one +select + (select count(*) from instrument where not deleted) as instrument_count, + (select count(*) from project where not deleted) as project_count, + (select count(*) from instrument_group) as instrument_group_count, + (select count(*) from instrument where not deleted and created_at > now() - '7 days'::interval) as new_instruments_7d, + (select count(*) from timeseries_measurement where time > now() - '2 hours'::interval) as new_measurements_2h; diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql new file mode 100644 index 00000000..783e4046 --- /dev/null +++ b/api/queries/instrument.sql @@ -0,0 +1,121 @@ +-- name: InstrumentListForProject :many +select i.* +from v_instrument i +inner join project_instrument pi on pi.instrument_id = i.id +where pi.project_id = $1; + + +-- name: InstrumentListForInstrumentGroup :many +select i.* +from v_instrument i +inner join instrument_group_instruments igi on igi.instrument_id = i.id +where instrument_group_id = $1; + + +-- name: InstrumentGet :one +select * +from v_instrument +where id = $1; + + +-- name: InstrumentGetCount :one +select count(*) from instrument where not deleted; + + +-- name: InstrumentCreate :one +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify(sqlc.arg(name), 'instrument'), + sqlc.arg(name), + sqlc.arg(type_id), + ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + sqlc.arg(station), + sqlc.arg(station_offset), + sqlc.arg(created_by), + sqlc.arg(created_at), + sqlc.arg(nid_id), + 
sqlc.arg(usgs_id), + sqlc.arg(show_cwms_tab) +) returning id, slug; + + +-- name: InstrumentCreateBatch :batchone +insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab) +values ( + slugify(sqlc.arg(name), 'instrument'), + sqlc.arg(name), + sqlc.arg(type_id), + ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + sqlc.arg(station), + sqlc.arg(station_offset), + sqlc.arg(created_by), + sqlc.arg(created_at), + sqlc.arg(nid_id), + sqlc.arg(usgs_id), + sqlc.arg(show_cwms_tab) +) +returning id, slug; + + +-- name: ProjectInstrumentListProjectIDForInstrument :many +select project_id from project_instrument where instrument_id = $1; + + +-- name: ProjectInstrumentListCountByInstrument :many +select pi.instrument_id, i.name as instrument_name, count(pi.*) as project_count +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) +group by pi.instrument_id, i.name +order by i.name; + + +-- name: InstrumentUpdate :exec +update instrument set + name=sqlc.arg(name), + type_id=sqlc.arg(type_id), + geometry=ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + updated_by=sqlc.arg(updated_by), + updated_at=sqlc.arg(updated_at), + station=sqlc.arg(station), + station_offset=sqlc.arg(station_offset), + nid_id=sqlc.arg(nid_id), + usgs_id=sqlc.arg(usgs_id), + show_cwms_tab=sqlc.arg(show_cwms_tab) +where id = sqlc.arg(id) +and id = any( + select instrument_id + from project_instrument + where project_id = sqlc.arg(project_id) +); + + +-- name: InstrumentUpdateGeometry :one +update instrument set + geometry=ST_SetSRID(ST_GeomFromGeoJSON(sqlc.arg(geometry)::json), 4326), + updated_by=sqlc.arg(updated_by), + updated_at=now() +where id = sqlc.arg(id) +and id = any( + select instrument_id + from project_instrument + where project_id = sqlc.arg(project_id) +) +returning id; + + +-- name: 
InstrumentDeleteFlag :exec +update instrument set deleted=true +where id = any( + select instrument_id + from project_instrument + where project_id = $1 +) +and id = $2; + + +-- name: InstrumentIDNameListByIDs :many +select id, name +from instrument +where id = any(sqlc.arg(instrument_ids)::uuid[]) +and not deleted; diff --git a/api/queries/instrument_assign.sql b/api/queries/instrument_assign.sql new file mode 100644 index 00000000..1ef0c5fb --- /dev/null +++ b/api/queries/instrument_assign.sql @@ -0,0 +1,66 @@ +-- name: ProjectInstrumentCreate :exec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; + + +-- name: ProjectInstrumentCreateBatch :batchexec +insert into project_instrument (project_id, instrument_id) values ($1, $2) +on conflict on constraint project_instrument_project_id_instrument_id_key do nothing; + + +-- name: ProjectInstrumentDelete :exec +delete from project_instrument where project_id = $1 and instrument_id = $2; + + +-- name: ProjectInstrumentDeleteBatch :batchexec +delete from project_instrument where project_id = $1 and instrument_id = $2; + + +-- name: ProjectInstrumentListForProjectInstrumentNames :many +select i.name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +where pi.project_id = sqlc.arg(project_id) +and i.name = any(sqlc.arg(instrument_names)::text[]) +and not i.deleted; + + +-- name: ProjectInstrumentListForInstrumentNameProjects :many +select i.name instrument_name +from project_instrument pi +inner join instrument i on pi.instrument_id = i.id +inner join project p on pi.project_id = p.id +where i.name = sqlc.arg(instrument_name) +and pi.project_id = any(sqlc.arg(project_ids)::uuid[]) +and not i.deleted +order by pi.project_id; + + +-- name: ProjectInstrumentListForInstrumentsProfileAdmin :many +select p.name as project_name, i.name as instrument_name +from project_instrument pi +inner 
join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = any(sqlc.arg(instrument_ids)::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where ppr.profile_id = sqlc.arg(profile_id) + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted; + + +-- name: ProjectInstrumentListForInstrumentProjectsProfileAdmin :many +select p.name +from project_instrument pi +inner join project p on pi.project_id = p.id +inner join instrument i on pi.instrument_id = i.id +where pi.instrument_id = sqlc.arg(instrument_id) +and pi.project_id = any(sqlc.arg(project_ids)::uuid[]) +and not exists ( + select 1 from v_profile_project_roles ppr + where profile_id = sqlc.arg(profile_id) + and (ppr.is_admin or (ppr.project_id = pi.project_id and ppr.role = 'ADMIN')) +) +and not i.deleted +order by p.name; diff --git a/api/queries/instrument_constant.sql b/api/queries/instrument_constant.sql new file mode 100644 index 00000000..58e3b112 --- /dev/null +++ b/api/queries/instrument_constant.sql @@ -0,0 +1,16 @@ +-- name: InstrumentConstantList :many +select t.* from v_timeseries t +inner join instrument_constants ic on ic.timeseries_id = t.id +where ic.instrument_id = $1; + + +-- name: InstrumentConstantCreate :exec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); + + +-- name: InstrumentConstantCreateBatch :batchexec +insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); + + +-- name: InstrumentConstantDelete :exec +delete from instrument_constants where instrument_id = $1 and timeseries_id = $2; diff --git a/api/queries/instrument_group.sql b/api/queries/instrument_group.sql new file mode 100644 index 00000000..9c426e71 --- /dev/null +++ b/api/queries/instrument_group.sql @@ -0,0 +1,50 @@ +-- name: InstrumentGroupList :many +select * +from v_instrument_group; + + +-- name: InstrumentGroupGet :one +select * +from 
v_instrument_group +where id=$1; + + +-- name: InstrumentGroupListForProject :many +select ig.* +from v_instrument_group ig +where ig.project_id = $1; + + +-- name: InstrumentGroupCreate :one +insert into instrument_group (slug, name, description, created_by, created_at, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id; + + +-- name: InstrumentGroupCreateBatch :batchone +insert into instrument_group (slug, name, description, created_by, created_at, project_id) +values (slugify($1, 'instrument_group'), $1, $2, $3, $4, $5) +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id; + + +-- name: InstrumentGroupUpdate :one +update instrument_group set + name = $2, + description = $3, + updated_by = $4, + updated_at = $5, + project_id = $6 + where id = $1 +returning id, slug, name, description, created_by, created_at, updated_by, updated_at, project_id; + + +-- name: InstrumentGroupDeleteFlag :exec +update instrument_group set deleted = true where id = $1; + + +-- name: InstrumentGroupInstrumentCreate :exec +insert into instrument_group_instruments (instrument_group_id, instrument_id) values ($1, $2); + + +-- name: InstrumentGroupInstrumentDelete :exec +delete from instrument_group_instruments where instrument_group_id = $1 and instrument_id = $2; diff --git a/api/queries/instrument_incl.sql b/api/queries/instrument_incl.sql new file mode 100644 index 00000000..0d783dc0 --- /dev/null +++ b/api/queries/instrument_incl.sql @@ -0,0 +1,82 @@ +-- name: InclOptsCreate :exec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: InclOptsCreateBatch :batchexec +insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: InclOptsUpdate :exec +update 
incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: InclOptsUpdateBatch :batchexec +update incl_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: InclSegmentListForInstrument :many +select * from v_incl_segment where instrument_id = $1; + + +-- name: InclSegmentCreate :exec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: InclSegmentCreateBatch :batchexec +insert into incl_segment ( + id, + instrument_id, + depth_timeseries_id, + a0_timeseries_id, + a180_timeseries_id, + b0_timeseries_id, + b180_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: InclSegmentUpdate :exec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2; + + +-- name: InclSegmentUpdateBatch :batchexec +update incl_segment set + depth_timeseries_id=$3, + a0_timeseries_id=$4, + a180_timeseries_id=$5, + b0_timeseries_id=$6, + b180_timeseries_id=$7 +where id = $1 and instrument_id = $2; + + +-- name: InclMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_incl_measurement m1 +where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) +union +select m2.instrument_id, m2.time, m2.measurements +from v_incl_measurement m2 +where m2.time = any(select o.initial_time from incl_opts o where o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) +order by time asc; diff --git a/api/queries/instrument_ipi.sql b/api/queries/instrument_ipi.sql new file mode 100644 index 00000000..dadd858b --- /dev/null +++ b/api/queries/instrument_ipi.sql @@ -0,0 +1,78 
@@ +-- name: IpiOptsCreate :exec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: IpiOptsCreateBatch :batchexec +insert into ipi_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: IpiOptsUpdate :exec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: IpiOptsUpdateBatch :batchexec +update ipi_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: IpiSegmentListForInstrument :many +select * from v_ipi_segment where instrument_id = $1; + + +-- name: IpiSegmentCreate :exec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6); + + +-- name: IpiSegmentCreateBatch :batchexec +insert into ipi_segment ( + id, + instrument_id, + length_timeseries_id, + tilt_timeseries_id, + inc_dev_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6); + + +-- name: IpiSegmentUpdate :exec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2; + + +-- name: IpiSegmentUpdateBatch :batchexec +update ipi_segment set + length_timeseries_id = $3, + tilt_timeseries_id = $4, + inc_dev_timeseries_id = $5, + temp_timeseries_id = $6 +where id = $1 and instrument_id = $2; + + +-- name: IpiMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_ipi_measurement m1 +where m1.instrument_id=sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) +union +select m2.instrument_id, m2.time, m2.measurements +from v_ipi_measurement m2 +where m2.time = any(select o.initial_time from ipi_opts o where 
o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) +order by time asc; diff --git a/api/queries/instrument_note.sql b/api/queries/instrument_note.sql new file mode 100644 index 00000000..16681a33 --- /dev/null +++ b/api/queries/instrument_note.sql @@ -0,0 +1,37 @@ +-- name: InstrumentNoteGet :one +select * +from instrument_note +where id = $1; + + +-- name: InstrumentNoteListForInstrument :many +select * +from instrument_note +where instrument_id = $1; + + +-- name: InstrumentNoteCreate :one +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at; + + +-- name: InstrumentNoteCreateBatch :batchone +insert into instrument_note (instrument_id, title, body, time, created_by, created_at) +values ($1, $2, $3, $4, $5, $6) +returning id, instrument_id, title, body, time, created_by, created_at, updated_by, updated_at; + + +-- name: InstrumentNoteUpdate :one +update instrument_note set + title=$2, + body=$3, + time=$4, + updated_by=$5, + updated_at=$6 +where id = $1 +returning *; + + +-- name: InstrumentNoteDelete :exec +delete from instrument_note where id = $1; diff --git a/api/queries/instrument_saa.sql b/api/queries/instrument_saa.sql new file mode 100644 index 00000000..708344e8 --- /dev/null +++ b/api/queries/instrument_saa.sql @@ -0,0 +1,82 @@ +-- name: SaaOptsCreate :exec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: SaaOptsCreateBatch :batchexec +insert into saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) +values ($1, $2, $3, $4); + + +-- name: SaaOptsUpdate :exec +update saa_opts set + bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: SaaOptsUpdateBatch :batchexec +update saa_opts set + 
bottom_elevation_timeseries_id = $2, + initial_time = $3 +where instrument_id = $1; + + +-- name: SaaSegmentListForInstrument :many +select * from v_saa_segment where instrument_id = $1; + + +-- name: SaaSegmentCreate :exec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: SaaSegmentCreateBatch :batchexec +insert into saa_segment ( + id, + instrument_id, + length_timeseries_id, + x_timeseries_id, + y_timeseries_id, + z_timeseries_id, + temp_timeseries_id +) values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: SaaSegmentUpdate :exec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2; + + +-- name: SaaSegmentUpdateBatch :batchexec +update saa_segment set + length_timeseries_id = $3, + x_timeseries_id = $4, + y_timeseries_id = $5, + z_timeseries_id = $6, + temp_timeseries_id = $7 +where id = $1 and instrument_id = $2; + + +-- name: SaaMeasurementListForInstrumentRange :many +select m1.instrument_id, m1.time, m1.measurements +from v_saa_measurement m1 +where m1.instrument_id = sqlc.arg(instrument_id) and m1.time >= sqlc.arg(start_time) and m1.time <= sqlc.arg(end_time) +union +select m2.instrument_id, m2.time, m2.measurements +from v_saa_measurement m2 +where m2.time = any(select o.initial_time from saa_opts o where o.instrument_id = sqlc.arg(instrument_id)) +and m2.instrument_id = sqlc.arg(instrument_id) +order by time asc; diff --git a/api/queries/instrument_status.sql b/api/queries/instrument_status.sql new file mode 100644 index 00000000..b12bb175 --- /dev/null +++ b/api/queries/instrument_status.sql @@ -0,0 +1,23 @@ +-- name: InstrumentStatusListForInstrument :many +select * from v_instrument_status +where instrument_id=$1 +order by time desc; + + +-- name: InstrumentStatusGet 
:one +select * from v_instrument_status +where id=$1; + + +-- name: InstrumentStatusCreateOrUpdate :exec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; + + +-- name: InstrumentStatusCreateOrUpdateBatch :batchexec +insert into instrument_status (instrument_id, status_id, time) values ($1, $2, $3) +on conflict on constraint instrument_unique_status_in_time do update set status_id = excluded.status_id; + + +-- name: InstrumentStatusDelete :exec +delete from instrument_status where id = $1; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql new file mode 100644 index 00000000..d84070bc --- /dev/null +++ b/api/queries/measurement.sql @@ -0,0 +1,97 @@ +-- name: TimeseriesMeasurementListForRange :many +select * from v_timeseries_measurement +where timeseries_id=sqlc.arg(timeseries_id) +and time > sqlc.arg(after) +and time < sqlc.arg(before); + + +-- name: TimeseriesMeasurementGetMostRecent :one +select * +from timeseries_measurement +where timeseries_id = sqlc.arg(timeseries_id) +order by time desc +limit 1; + + +-- name: TimeseriesMeasurementCreate :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing; + + +-- name: TimeseriesMeasurementCreateBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do nothing; + + +-- name: TimeseriesMeasurementCreateOrUpdate :exec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint timeseries_unique_time do update set value = excluded.value; + + +-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3) +on conflict on constraint 
timeseries_unique_time do update set value = excluded.value; + + +-- name: TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_measurement (timeseries_id, time, value) +values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(value)) +on conflict on constraint timeseries_unique_time do update set value = excluded.value; + + +-- name: TimeseriesNoteCreate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing; + + +-- name: TimeseriesNoteCreateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do nothing; + + +-- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) +values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(masked), sqlc.arg(validated), sqlc.arg(annotation)) +on conflict on constraint notes_unique_time do nothing; + + +-- name: TimeseriesNoteCreateOrUpdate :exec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; + + +-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec +insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) +on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; + + +-- name: TimeseriesMeasurementDelete :exec +delete from 
timeseries_measurement where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesMeasurementDeleteBatch :batchexec +delete from timeseries_measurement where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesMeasurementDeleteRange :exec +delete from timeseries_measurement where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); + + +-- name: TimeseriesMeasurementDeleteRangeBatch :batchexec +delete from timeseries_measurement where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); + + +-- name: TimeseriesNoteDelete :exec +delete from timeseries_notes where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesNoteDeleteBatch :batchexec +delete from timeseries_notes where timeseries_id=$1 and time=$2; + + +-- name: TimeseriesNoteDeleteRange :exec +delete from timeseries_notes where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); + + +-- name: TimeseriesNoteDeleteRangeBatch :batchexec +delete from timeseries_notes where timeseries_id = sqlc.arg(timeseries_id) and time > sqlc.arg(after) and time < sqlc.arg(before); diff --git a/api/queries/plot_config.sql b/api/queries/plot_config.sql new file mode 100644 index 00000000..ac4fffd7 --- /dev/null +++ b/api/queries/plot_config.sql @@ -0,0 +1,32 @@ +-- name: PlotConfigListForProject :many +select * +from v_plot_configuration +where project_id = $1; + + +-- name: PlotConfigGet :one +select * +from v_plot_configuration +where id = $1; + + +-- name: PlotConfigCreate :one +insert into plot_configuration (slug, name, project_id, created_by, created_at, plot_type) values (slugify($1, 'plot_configuration'), $1, $2, $3, $4, $5) +returning id; + + +-- name: PlotConfigSettingsCreate :exec +insert into plot_configuration_settings (id, show_masked, show_nonvalidated, show_comments, auto_range, date_range, threshold) +values ($1, $2, $3, $4, $5, $6, $7); + + +-- name: PlotConfigUpdate :exec 
+update plot_configuration set name = $3, updated_by = $4, updated_at = $5 where project_id = $1 and id = $2; + + +-- name: PlotConfigDelete :exec +delete from plot_configuration where project_id = $1 and id = $2; + + +-- name: PlotConfigSettingsDelete :exec +delete from plot_configuration_settings where id = $1; diff --git a/api/queries/plot_config_bullseye.sql b/api/queries/plot_config_bullseye.sql new file mode 100644 index 00000000..e9b5264a --- /dev/null +++ b/api/queries/plot_config_bullseye.sql @@ -0,0 +1,32 @@ +-- name: PlotBullseyeConfigCreate :exec +insert into plot_bullseye_config (plot_config_id, x_axis_timeseries_id, y_axis_timeseries_id) values ($1, $2, $3); + + +-- name: PlotBullseyeConfigUpdate :exec +UPDATE plot_bullseye_config SET x_axis_timeseries_id=$2, y_axis_timeseries_id=$3 WHERE plot_config_id=$1; + + +-- name: PlotBullseyeConfigDelete :exec +delete from plot_bullseye_config where plot_config_id = $1; + + +-- name: PlotConfigMeasurementListBullseye :many +select + t.time, + locf(xm.value) as x, + locf(ym.value) as y +from plot_bullseye_config pc +inner join timeseries_measurement t +on t.timeseries_id = pc.x_axis_timeseries_id +or t.timeseries_id = pc.y_axis_timeseries_id +left join timeseries_measurement xm +on xm.timeseries_id = pc.x_axis_timeseries_id +and xm.time = t.time +left join timeseries_measurement ym +on ym.timeseries_id = pc.y_axis_timeseries_id +and ym.time = t.time +where pc.plot_config_id = sqlc.arg(plot_config_id) +and t.time > sqlc.arg(after) +and t.time < sqlc.arg(before) +group by t.time +order by t.time asc; diff --git a/api/queries/plot_config_contour.sql b/api/queries/plot_config_contour.sql new file mode 100644 index 00000000..bf44f6dc --- /dev/null +++ b/api/queries/plot_config_contour.sql @@ -0,0 +1,57 @@ +-- name: PlotContourConfigCreate :exec +insert into plot_contour_config (plot_config_id, "time", locf_backfill, gradient_smoothing, contour_smoothing, show_labels) +values ($1, $2, $3, $4, $5, $6); + + +-- name: 
PlotContourConfigUpdate :exec +update plot_contour_config set "time"=$2, locf_backfill=$3, gradient_smoothing=$4, contour_smoothing=$5, show_labels=$6 +where plot_config_id=$1; + + +-- name: PlotContourConfigDelete :exec +delete from plot_contour_config where plot_config_id = $1; + + +-- name: PlotContourConfigTimeseriesCreate :exec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing; + + +-- name: PlotContourConfigTimeseriesCreateBatch :batchexec +insert into plot_contour_config_timeseries (plot_contour_config_id, timeseries_id) values ($1, $2) +on conflict (plot_contour_config_id, timeseries_id) do nothing; + + +-- name: PlotContourConfigTimeseriesDeleteForPlotContourConfig :exec +delete from plot_contour_config_timeseries where plot_contour_config_id = $1; + + +-- name: PlotContourConfigListTimeRange :many +select distinct mm.time +from plot_contour_config_timeseries pcts +inner join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +where pcts.plot_contour_config_id = sqlc.arg(plot_contour_config_id) +and mm.time > sqlc.arg(after) +and mm.time < sqlc.arg(before) +order by time asc; + + +-- name: PlotConfigMeasurementListContour :many +select + oi.x::double precision x, + oi.y::double precision y, + locf(mm.value) z +from plot_contour_config pc +left join plot_contour_config_timeseries pcts on pcts.plot_contour_config_id = pc.plot_config_id +left join timeseries_measurement mm on mm.timeseries_id = pcts.timeseries_id +inner join timeseries ts on ts.id = pcts.timeseries_id +inner join ( + select + ii.id, + st_x(st_centroid(ii.geometry)) as x, + st_y(st_centroid(ii.geometry)) as y + from instrument ii +) oi on oi.id = ts.instrument_id +where plot_config_id = $1 +and mm.time = $2 +group by pc.plot_config_id, pcts.timeseries_id, oi.x, oi.y; diff --git a/api/queries/plot_config_profile.sql b/api/queries/plot_config_profile.sql new file mode 
100644 index 00000000..61b3e89e --- /dev/null +++ b/api/queries/plot_config_profile.sql @@ -0,0 +1,6 @@ +-- name: PlotProfileConfigCreate :exec +insert into plot_profile_config (plot_config_id, instrument_id) values ($1, $2); + + +-- name: PlotProfileConfigUpdate :exec +update plot_profile_config set instrument_id=$2 where plot_config_id=$1; diff --git a/api/queries/plot_config_scatter_line.sql b/api/queries/plot_config_scatter_line.sql new file mode 100644 index 00000000..0467a22a --- /dev/null +++ b/api/queries/plot_config_scatter_line.sql @@ -0,0 +1,47 @@ +-- name: PlotConfigScatterLineLayoutCreate :exec +insert into plot_scatter_line_config (plot_config_id, y_axis_title, y2_axis_title) values ($1, $2, $3); + + +-- name: PlotConfigScatterLineLayoutUpdate :exec +update plot_scatter_line_config set y_axis_title=$2, y2_axis_title=$3 where plot_config_id=$1; + + +-- name: PlotConfigTimeseriesTraceCreate :exec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8); + + +-- name: PlotConfigTimeseriesTracesCreateBatch :batchexec +insert into plot_configuration_timeseries_trace +(plot_configuration_id, timeseries_id, trace_order, color, line_style, width, show_markers, y_axis) values +($1, $2, $3, $4, $5, $6, $7, $8); + + +-- name: PlotConfigTimeseriesTraceUpdate :exec +update plot_configuration_timeseries_trace +set trace_order=$3, color=$4, line_style=$5, width=$6, show_markers=$7, y_axis=$8 +where plot_configuration_id=$1 and timeseries_id=$2; + + +-- name: PlotConfigTimeseriesTraceDeleteForPlotConfig :exec +delete from plot_configuration_timeseries_trace where plot_configuration_id=$1; + + +-- name: PlotConfigCustomShapeCreate :exec +insert into plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); + + +-- name: PlotConfigCustomShapeCreateBatch :batchexec +insert into 
plot_configuration_custom_shape +(plot_configuration_id, enabled, name, data_point, color) values ($1, $2, $3, $4, $5); + + +-- name: PlotConfigCustomShapeUpdate :exec +update plot_configuration_custom_shape +set enabled=$2, name=$3, data_point=$4, color=$5 where plot_configuration_id=$1; + + +-- name: PlotConfigCustomShapeDeleteForPlotConfig :exec +delete from plot_configuration_custom_shape where plot_configuration_id=$1; diff --git a/api/queries/profile.sql b/api/queries/profile.sql new file mode 100644 index 00000000..5f443b26 --- /dev/null +++ b/api/queries/profile.sql @@ -0,0 +1,52 @@ +-- name: ProfileGetForEDIPI :one +select * from v_profile where edipi = $1; + + +-- name: ProfileGetForEmail :one +select * from v_profile where email ilike $1 +limit 1; + + +-- name: ProfileGetForUsername :one +select * from v_profile where username = $1 +limit 1; + + +-- name: ProfileTokenList :many +select token_id, issued from profile_token where profile_id = $1; + + +-- name: ProfileGetForToken :one +select p.id, p.edipi, p.username, p.email, p.is_admin +from profile_token t +left join v_profile p on p.id = t.profile_id +where t.token_id = $1 +limit 1; + + +-- name: ProfileCreate :one +insert into profile (edipi, username, email, display_name) values ($1, $2, $3, $4) returning id, username, email, display_name; + + +-- name: ProfileTokenCreate :one +insert into profile_token (token_id, profile_id, hash) values ($1,$2,$3) returning *; + + +-- name: ProfileTokenGet :one +select id, token_id, profile_id, issued, hash from profile_token where token_id=$1 limit 1; + + +-- name: ProfileUpdateForEDIPI :exec +UPDATE profile SET username=$1, email=$2, display_name=$3 WHERE edipi=$4; + + +-- name: ProfileUpdateForEmail :exec +update profile set username=$1, display_name=$2 where email ilike $3; + + +-- name: ProfileUpdateForUsername :exec +update profile set email=$1, display_name=$2 where username=$3; + + +-- name: ProfileTokenDelete :exec +delete from profile_token where 
profile_id=$1 and token_id=$2; diff --git a/api/queries/project.sql b/api/queries/project.sql new file mode 100644 index 00000000..4f33d93a --- /dev/null +++ b/api/queries/project.sql @@ -0,0 +1,59 @@ +-- name: ProjectList :many +select * from v_project; + + +-- name: ProjectListForNameSearch :many +select * from v_project +where name ilike '%'||sqlc.arg(name)||'%' +limit sqlc.arg(result_limit); + + +-- name: ProjectListForFederalID :many +select * from v_project +where federal_id = sqlc.arg(federal_id); + + +-- name: DistrictList :many +select * from v_district; + + +-- name: ProjectListForProfileRole :many +select p.* +from v_project p +inner join profile_project_roles pr on pr.project_id = p.id +inner join role r on r.id = pr.role_id +where pr.profile_id = $1 +and r.name = $2; + + +-- name: ProjectListForProfileAdmin :many +select pr.project_id from profile_project_roles pr +inner join role ro on ro.id = pr.role_id +where pr.profile_id = $1 +and ro.name = 'ADMIN'; + + +-- name: ProjectGetCount :one +select count(*) from project where not deleted; + + +-- name: ProjectGet :one +select * from v_project where id = $1; + + +-- name: ProjectCreateBatch :batchone +insert into project (federal_id, slug, name, district_id, created_by, created_at) +values ($1, slugify($2, 'project'), $2, $3, $4, $5) +returning id, slug; + + +-- name: ProjectUpdate :one +update project set name=$2, updated_by=$3, updated_at=$4, district_id=$5, federal_id=$6 where id=$1 returning id; + + +-- name: ProjectUpdateImage :exec +update project set image = $1 where id = $2; + + +-- name: ProjectDeleteFlag :exec +update project set deleted=true where id = $1; diff --git a/api/queries/project_role.sql b/api/queries/project_role.sql new file mode 100644 index 00000000..e3dd9c1a --- /dev/null +++ b/api/queries/project_role.sql @@ -0,0 +1,42 @@ +-- name: ProfileProjectRoleListForProject :many +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where project_id = $1 
+order by email; + + +-- name: ProfileProjectRoleGet :one +select id, profile_id, username, email, role_id, role +from v_profile_project_roles +where id = $1; + + +-- name: ProfileProjectRoleCreate :one +insert into profile_project_roles (project_id, profile_id, role_id, granted_by) +values ($1, $2, $3, $4) +on conflict on constraint unique_profile_project_role do update set project_id = excluded.project_id +returning id; + + +-- name: ProfileProjectRoleDelete :exec +delete from profile_project_roles where project_id = $1 and profile_id = $2 and role_id = $3; + + +-- name: ProfileProjectRoleGetIsAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and r.name = 'ADMIN' +); + + +-- name: ProfileProjectRoleGetIsMemberOrAdmin :one +select exists ( + select 1 from profile_project_roles pr + inner join role r on r.id = pr.role_id + where pr.profile_id = $1 + and pr.project_id = $2 + and (r.name = 'MEMBER' or r.name = 'ADMIN') +); diff --git a/api/queries/report_config.sql b/api/queries/report_config.sql new file mode 100644 index 00000000..c6a60f22 --- /dev/null +++ b/api/queries/report_config.sql @@ -0,0 +1,63 @@ +-- name: ReportConfigCreate :one +insert into report_config ( + name, slug, project_id, created_by, description, date_range, date_range_enabled, + show_masked, show_masked_enabled, show_nonvalidated, show_nonvalidated_enabled +) +values ($1, slugify($1, 'report_config'), $2, $3, $4, $5, $6, $7, $8, $9, $10) +returning id; + + +-- name: ReportConfigListForProject :many +select * from v_report_config where project_id = $1; + + +-- name: ReportConfigListForReportConfigWithPlotConfig :many +select * from v_plot_configuration where id = any( + select plot_config_id from report_config_plot_config where report_config_id = $1 +); + + +-- name: ReportConfigGet :one +select * from v_report_config where id = $1; + + +-- name: ReportConfigUpdate :exec +update 
report_config set name=$2, +updated_by=$3, updated_at=$4, description=$5, date_range=$6, date_range_enabled=$7, show_masked=$8, +show_masked_enabled=$9, show_nonvalidated=$10, show_nonvalidated_enabled=$11 where id=$1; + + +-- name: ReportConfigDelete :exec +delete from report_config where id=$1; + + +-- name: ReportConfigPlotConfigCreate :exec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); + + +-- name: ReportConfigPlotConfigCreateBatch :batchexec +insert into report_config_plot_config (report_config_id, plot_config_id) values ($1, $2); + + +-- name: ReportConfigPlotConfigDelete :exec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; + + +-- name: ReportConfigPlotConfigDeleteBatch :batchexec +delete from report_config_plot_config where report_config_id=$1 and plot_config_id=$2; + + +-- name: ReportConfigPlotConfigDeleteForReportConfig :exec +delete from report_config_plot_config where report_config_id=$1; + + +-- name: ReportDownloadJobGet :one +select * from report_download_job where id=$1 and created_by=$2; + + +-- name: ReportDownloadJobCreate :one +insert into report_download_job (report_config_id, created_by) values ($1, $2) returning *; + + +-- name: ReportDownloadJobUpdate :exec +update report_download_job set status=$2, progress=$3, progress_updated_at=$4, file_key=$5, file_expiry=$6 where id=$1; diff --git a/api/queries/submittal.sql b/api/queries/submittal.sql new file mode 100644 index 00000000..37d4cab5 --- /dev/null +++ b/api/queries/submittal.sql @@ -0,0 +1,57 @@ +-- name: SubmittalListForProject :many +select * +from v_submittal +where project_id = sqlc.arg(project_id) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc, alert_type_name asc; + + +-- name: SubmittalListForInstrument :many +select sub.* +from v_submittal sub +inner join alert_config_instrument aci on aci.alert_config_id = 
sub.alert_config_id +where aci.instrument_id = sqlc.arg(instrument_id) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by sub.due_at desc; + + +-- name: SubmittalListForAlertConfig :many +select * +from v_submittal +where alert_config_id = sqlc.arg(alert_config_id) +and (sqlc.arg(show_incomplete_missing)::boolean = false or (completed_at is null and not marked_as_missing)) +order by due_at desc; + + +-- name: SubmittalListUnverifiedMissing :many +select * +from v_submittal +where completed_at is null +and not marked_as_missing +order by due_at desc; + + +-- name: SubmittalUpdate :exec +update submittal set + submittal_status_id = $2, + completed_at = $3, + warning_sent = $4 +where id = $1; + + +-- name: SubmittalUpdateVerifyMissing :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where id = $1 +and completed_at is null +and now() > due_at; + + +-- name: SubmittalUpdateVerifyMissingForAlertConfig :exec +update submittal set + submittal_status_id = '84a0f437-a20a-4ac2-8a5b-f8dc35e8489b'::uuid, + marked_as_missing = true +where alert_config_id = $1 +and completed_at is null +and now() > due_at; diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql new file mode 100644 index 00000000..c03e5f74 --- /dev/null +++ b/api/queries/timeseries.sql @@ -0,0 +1,63 @@ +-- name: TimeseriesGetExistsStored :one +select exists (select id from v_timeseries_stored where id = $1); + + +-- name: TimeseriesGet :one +select * from v_timeseries where id=$1; + + +-- name: TimeseriesGetAllBelongToProject :one +select not exists ( + select true + from timeseries ts + where not ts.instrument_id = any ( + select p.instrument_id + from project_instrument p + where p.project_id = sqlc.arg(project_id) + ) + and ts.id = any(sqlc.arg(timeseries_ids)::uuid[]) +); + + +-- name: TimeseriesListForInstrument :many +select * from v_timeseries +where 
instrument_id = $1; + + +-- name: TimeseriesListForPlotConfig :many +select t.* from v_timeseries t +inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id +where pct.plot_configuration_id = $1; + + +-- name: TimeseriesListForInstrumentGroup :many +select t.* from v_timeseries t +inner join instrument_group_instruments gi on gi.instrument_id = t.instrument_id +where gi.instrument_group_id = $1; + + +-- name: TimeseriesListForProject :many +select t.* from v_timeseries t +inner join project_instrument p on p.instrument_id = t.instrument_id +where p.project_id = $1; + + +-- name: TimeseriesCreate :one +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type; + + +-- name: TimeseriesCreateBatch :batchone +insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type) +values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5) +returning id, instrument_id, slug, name, parameter_id, unit_id, type; + + +-- name: TimeseriesUpdate :exec +update timeseries set name=$2, instrument_id=$3, parameter_id=$4, unit_id=$5 +where id = $1; + + +-- name: TimeseriesDelete :exec +delete from timeseries where id = $1; diff --git a/api/queries/timeseries_calculated.sql b/api/queries/timeseries_calculated.sql new file mode 100644 index 00000000..4b2f39c3 --- /dev/null +++ b/api/queries/timeseries_calculated.sql @@ -0,0 +1,54 @@ +-- name: TimeseriesComputedGet :one +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where id = $1; + + +-- name: TimeseriesComputedListForInstrument :many +select + id, + instrument_id, + parameter_id, + unit_id, + slug, + name as formula_name, + coalesce(contents, '') as formula +from v_timeseries_computed +where instrument_id = $1; + + +-- name: CalculationCreate :exec 
+insert into calculation (timeseries_id, contents) values ($1,$2); + + +-- name: CalculationUpdate :exec +update calculation set contents=$2 where timeseries_id=$1; + + +-- name: TimeseriesComputedDelete :exec +delete from timeseries where id = $1 and id = any(select timeseries_id from calculation); + + +-- the below queries are needed because the slug is currently used as the variable name, it would +-- be better if we used a generated column for this on the timeseries table, maybe converted to snake_case + +-- name: TimeseriesComputedCreate :one +insert into timeseries (instrument_id, parameter_id, unit_id, slug, name, type) +values ($1, $2, $3, slugify($4, 'timeseries'), $4, 'computed') +returning id; + + +-- name: TimeseriesComputedUpdate :exec +update timeseries set + parameter_id=$2, + unit_id=$3, + slug=$4, + name=$5 +where id = $1; diff --git a/api/queries/timeseries_cwms.sql b/api/queries/timeseries_cwms.sql new file mode 100644 index 00000000..b501c5a9 --- /dev/null +++ b/api/queries/timeseries_cwms.sql @@ -0,0 +1,27 @@ +-- name: TimeseriesCwmsList :many +select * from v_timeseries_cwms +where instrument_id = $1; + + +-- name: TimeseriesCwmsGet :one +select * from v_timeseries_cwms +where id = $1; + + +-- name: TimeseriesCwmsCreate :exec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5); + + +-- name: TimeseriesCwmsCreateBatch :batchexec +insert into timeseries_cwms (timeseries_id, cwms_timeseries_id, cwms_office_id, cwms_extent_earliest_time, cwms_extent_latest_time) values +($1, $2, $3, $4, $5); + + +-- name: TimeseriesCwmsUpdate :exec +update timeseries_cwms set + cwms_timeseries_id=$2, + cwms_office_id=$3, + cwms_extent_earliest_time=$4, + cwms_extent_latest_time=$5 +where timeseries_id=$1; diff --git a/api/queries/unit.sql b/api/queries/unit.sql new file mode 100644 index 00000000..d0b92e41 --- /dev/null +++ b/api/queries/unit.sql @@ -0,
+1,4 @@ +-- name: UnitsList :many +select id, name, abbreviation, unit_family_id, unit_family, measure_id, measure +from v_unit +order by name; diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql new file mode 100644 index 00000000..f395a4a1 --- /dev/null +++ b/api/queries/uploader.sql @@ -0,0 +1,84 @@ +-- name: UploaderConfigListForProject :many +select * from v_uploader_config where project_id=$1; + + +-- name: UploaderConfigGet :one +select * from v_uploader_config where id=$1; + + +-- name: UploaderConfigCreate :one +insert into uploader_config ( + project_id, + name, + slug, + description, + created_at, + created_by, + type, + tz_name, + time_field, + validated_field_enabled, + validated_field, + masked_field_enabled, + masked_field, + comment_field_enabled, + comment_field, + column_offset, + row_offset +) +values ( + sqlc.arg(project_id), + sqlc.arg(name), + slugify(sqlc.arg(slug), 'uploader_config'), + sqlc.arg(description), + sqlc.arg(created_at), + sqlc.arg(created_by), + sqlc.arg(type), + sqlc.arg(tz_name), + sqlc.arg(time_field), + sqlc.arg(validated_field_enabled), + sqlc.arg(validated_field), + sqlc.arg(masked_field_enabled), + sqlc.arg(masked_field), + sqlc.arg(comment_field_enabled), + sqlc.arg(comment_field), + sqlc.arg(column_offset), + sqlc.arg(row_offset) +) +returning id; + + +-- name: UploaderConfigUpdate :exec +update uploader_config set + name=$2, + description=$3, + updated_by=$4, + updated_at=$5, + type=$6, + tz_name=$7, + time_field=$8, + validated_field_enabled=$9, + validated_field=$10, + masked_field_enabled=$11, + masked_field=$12, + comment_field_enabled=$13, + comment_field=$14, + column_offset=$15, + row_offset=$16 +where id=$1; + + +-- name: UploaderConfigDelete :exec +delete from uploader_config where id=$1; + + +-- name: UploaderConfigMappingList :many +select * from uploader_config_mapping where uploader_config_id=$1; + + +-- name: UploaderConfigMappingCreateBatch :batchexec +insert into uploader_config_mapping 
(uploader_config_id, field_name, timeseries_id) values ($1, $2, $3); + + +-- name: UploaderConfigMappingDeleteForUploaderConfig :exec +delete from uploader_config_mapping where uploader_config_id=$1; diff --git a/compose.sh b/compose.sh index 4ce826ef..2e3e02f6 100755 --- a/compose.sh +++ b/compose.sh @@ -2,7 +2,10 @@ set -Eeo pipefail -parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) +parent_path=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + pwd -P +) cd "$parent_path" COMPOSECMD="docker compose -f docker-compose.yml" @@ -10,28 +13,28 @@ COMPOSECMD="docker compose -f docker-compose.yml" mkdocs() { ( DOCKER_BUILDKIT=1 docker build --file api/Dockerfile.openapi --output api/internal/server/docs api - cd report && npm run generate >/dev/null; + cd report && npm run generate >/dev/null ) } +if [ "$1" = "gen" ]; then + docker run --rm -v $(pwd):/src -w /src sqlc/sqlc generate -if [ "$1" = "watch" ]; then +elif [ "$1" = "watch" ]; then mkdocs -q if [ "$2" = "mock" ]; then - DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml --profile=mock watch + DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml --profile=mock up --watch else - DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml watch + DOCKER_BUILDKIT=1 $COMPOSECMD -f docker-compose.dev.yml up --watch fi - elif [ "$1" = "up" ]; then mkdocs -q if [ "$2" = "mock" ]; then - DOCKER_BUILDKIT=1 $COMPOSECMD --profile=mock up -d --build + DOCKER_BUILDKIT=1 $COMPOSECMD --profile=mock up --remove-orphans -d --build else - DOCKER_BUILDKIT=1 $COMPOSECMD up -d --build + DOCKER_BUILDKIT=1 $COMPOSECMD up --remove-orphans -d --build fi - elif [ "$1" = "build" ]; then if [ "$2" = "local" ] || [ "$2" = "develop" ] || [ "$2" = "test" ] || [ "$2" = "prod" ]; then @@ -46,20 +49,19 @@ elif [ "$1" = "build" ]; then AMD64_TARGET_PLATFORM=true fi - for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader - do - docker build \ - ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ - 
--build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ - --build-arg="GO_VERSION=1.23" \ - --build-arg="BUILD_TAG=$2" \ - --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ - -t $BUILD_TARGET:"$2" api + for BUILD_TARGET in midas-api midas-sql midas-telemetry midas-alert midas-dcs-loader; do + docker build \ + ${AMD64_TARGET_PLATFORM:+--platform=linux/amd64} \ + --build-arg="BASE_IMAGE=${SCRATCH_BASE_IMAGE}" \ + --build-arg="GO_VERSION=1.23" \ + --build-arg="BUILD_TAG=$2" \ + --build-arg="BUILD_TARGET=${BUILD_TARGET}" \ + -t $BUILD_TARGET:"$2" api done docker build \ - --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ - -t midas-report:$2 report + --build-arg="BASE_IMAGE=${ALPINE_BASE_IMAGE}" \ + -t midas-report:$2 report else echo -e "usage:\n\t./compose.sh build [local,develop,test,prod]" exit 1 @@ -74,8 +76,7 @@ elif [ "$1" = "build" ]; then declare -a REGISTRIES=("midas-api" "midas-telemetry" "midas-alert" "midas-dcs-loader" "midas-sql") # tag - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker tag $IMAGE:"$2" $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -83,8 +84,7 @@ elif [ "$1" = "build" ]; then fi # push - for IMAGE in "${REGISTRIES[@]}" - do + for IMAGE in "${REGISTRIES[@]}"; do docker push $4/$IMAGE:"$2" done if [ "$2" = "develop" ]; then @@ -92,22 +92,18 @@ elif [ "$1" = "build" ]; then fi fi - elif [ "$1" = "authdbdump" ]; then - $COMPOSECMD exec authdb pg_dump postgres > auth/initdb/init2.sql - + $COMPOSECMD exec authdb pg_dump postgres >auth/initdb/init2.sql elif [ "$1" = "down" ]; then mkdocs -q $COMPOSECMD -f docker-compose.dev.yml --profile=mock down - elif [ "$1" = "clean" ]; then $COMPOSECMD -f docker-compose.dev.yml --profile=mock down -v - elif [ "$1" = "test" ]; then - docker compose build + $COMPOSECMD up --remove-orphans -d --build db migrate elasticmq api shift TEARDOWN=false @@ -115,34 +111,34 @@ elif [ "$1" = "test" ]; then while [[ $# -gt 0 ]]; do case $1 in - -rm) - TEARDOWN=true - shift - ;; - *) - 
REST_ARGS+=("$1") - shift - ;; + -rm) + TEARDOWN=true + shift + ;; + *) + REST_ARGS+=("$1") + shift + ;; esac done GOCMD="go test ${REST_ARGS[@]} github.com/USACE/instrumentation-api/api/internal/handler" + set +e if [ "$REPORT" = true ]; then - docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api > $(pwd)/test.log + docker compose run --remove-orphans -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api >$(pwd)/test.log else - docker compose run -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api + docker compose run --remove-orphans -e INSTRUMENTATION_AUTH_JWT_MOCKED=true --entrypoint="$GOCMD" api fi + set -e if [ $TEARDOWN = true ]; then docker compose --profile=mock down -v fi - elif [ "$1" = "mkdocs" ]; then mkdocs - else echo -e "usage:\n\t./compose.sh watch\n\t./compose.sh up\n\t./compose.sh down\n\t./compose.sh clean\n\t./compose.sh test\n\t./compose.sh mkdocs" fi diff --git a/docker-compose.yml b/docker-compose.yml index e3465ea9..f2d9c487 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -87,13 +87,6 @@ services: required: true ports: - "${API_PORT:-8080}:80" - depends_on: - db: - condition: service_healthy - migrate: - condition: service_completed_successfully - elasticmq: - condition: service_started telemetry: build: diff --git a/go.work.sum b/go.work.sum index 4ab4a8b4..38e3ed0a 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,49 +1,96 @@ +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/ClickHouse/ch-go v0.61.5/go.mod h1:s1LJW/F/LcFs5HJnuogFMta50kKDO0lf9zzfrbl0RQg= github.com/ClickHouse/clickhouse-go/v2 v2.27.1/go.mod h1:XvcaX7ai9T9si83rZ0cB3y2upq9AYMwdj16Trqm+sPg= +github.com/Microsoft/go-winio v0.6.1/go.mod 
h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/hcsshim v0.12.0/go.mod h1:RZV12pcHCXQ42XnlQ3pz6FZfmrC1C+R4gaOHhRNML1g= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= +github.com/containerd/containerd v1.7.14/go.mod h1:YMC9Qt5yzNqXx/fO4j/5yYVIHXSRrlB3H7sxkUTvspg= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v26.1.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJa7AfT8HpBFQ= github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw= github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo= +github.com/go-logr/logr 
v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= -github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/lucasjones/reggen v0.0.0-20200904144131-37ba4fa293bb/go.mod h1:5ELEyG+X8f+meRWHuqUOewBOhvHkl7M76pdGEansxW4= +github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= 
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= github.com/mfridman/xflag v0.0.0-20240825232106-efb77353e578/go.mod h1:/483ywM5ZO5SuMVjrIGquYNE5CzLrj5Ux/LxWWnjRaE= github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs= +github.com/shirou/gopsutil/v3 v3.24.2/go.mod h1:tSg/594BcA+8UdQU2XcW803GWYgdtauFFPgJCJKZlVk= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= 
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/testcontainers/testcontainers-go v0.29.1/go.mod h1:SnKnKQav8UcgtKqjp/AD8bE1MqZm+3TDb/B8crE3XnI= +github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1/go.mod h1:YsWyy+pHDgvGdi0axGOx6CGXWsE6eqSaApyd1FYYSSc= +github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= +github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY= github.com/tursodatabase/libsql-client-go v0.0.0-20240812094001-348a4e45b535/go.mod h1:l8xTsYB90uaVdMHXMCxKKLSgw5wLYBwBKKefNIUnm9s= +github.com/twpayne/go-kml/v3 v3.1.1/go.mod h1:7VT0jsr6fzn5CPZ5e4OB93vhgf3fZcwflK7ydbXFVos= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/vertica/vertica-sql-go v1.3.3/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= github.com/ydb-platform/ydb-go-genproto v0.0.0-20240528144234-5d5a685e41f7/go.mod h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I= github.com/ydb-platform/ydb-go-sdk/v3 v3.76.5/go.mod h1:IHwuXyolaAmGK2Dp7+dlhsnXphG1pwCoaP/OITT3+tU= +github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +go.mongodb.org/mongo-driver v1.11.4 h1:4ayjakA013OdpGyL2K3ZqylTac/rMjrJOMZ1EHizXas= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= 
+go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0= golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8/go.mod h1:CQ1k9gNrJ50XIzaKCRR2hssIjF07kZFEiieALBM/ARQ= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= @@ -57,6 +104,7 @@ golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240311173647-c811ad7063a7/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= diff --git a/report/generated.d.ts b/report/generated.d.ts index 0f48113c..90517a63 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -18,7 +18,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": 
components["schemas"]["Submittal"][]; + "application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -101,14 +101,14 @@ export interface paths { /** @description alert subscription payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertSubscription"]; + "application/json": components["schemas"]["dto.AlertSubscription"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertSubscription"][]; + "application/json": components["schemas"]["db.AlertProfileSubscription"][]; }; }; /** @description Bad Request */ @@ -139,7 +139,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AwarePlatformParameterConfig"][]; + "application/json": components["schemas"]["service.AwarePlatformParameterConfig"][]; }; }; /** @description Bad Request */ @@ -170,7 +170,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AwareParameter"][]; + "application/json": components["schemas"]["db.AwareParameterListRow"][]; }; }; /** @description Bad Request */ @@ -206,14 +206,14 @@ export interface paths { /** @description datalogger payload */ requestBody: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["dto.Datalogger"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerWithKey"][]; + "application/json": components["schemas"]["service.DataloggerWithKey"]; }; }; /** @description Bad Request */ @@ -254,7 +254,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["db.VDatalogger"]; }; }; /** @description Bad Request */ @@ -292,14 +292,14 @@ export interface paths { /** @description datalogger payload */ 
requestBody: { content: { - "*/*": components["schemas"]["Datalogger"]; + "*/*": components["schemas"]["dto.Datalogger"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"]; + "application/json": components["schemas"]["db.VDatalogger"]; }; }; /** @description Bad Request */ @@ -380,14 +380,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -428,7 +428,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerWithKey"]; + "application/json": components["schemas"]["service.DataloggerWithKey"]; }; }; /** @description Bad Request */ @@ -471,7 +471,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"][]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"][]; }; }; /** @description Bad Request */ @@ -511,14 +511,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -558,14 +558,14 @@ export interface paths { /** @description equivalency table payload */ requestBody: { content: { - "*/*": components["schemas"]["EquivalencyTable"]; + "*/*": 
components["schemas"]["dto.EquivalencyTable"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["EquivalencyTable"]; + "application/json": components["schemas"]["db.VDataloggerEquivalencyTable"]; }; }; /** @description Bad Request */ @@ -698,7 +698,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerTablePreview"]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -741,7 +743,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DataloggerTablePreview"]; + "application/json": components["schemas"]["db.VDataloggerPreview"]; }; }; /** @description Bad Request */ @@ -778,7 +780,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Datalogger"][]; + "application/json": components["schemas"]["db.VDatalogger"][]; }; }; /** @description Bad Request */ @@ -809,7 +811,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["District"][]; + "application/json": components["schemas"]["db.VDistrict"][]; }; }; /** @description Bad Request */ @@ -834,13 +836,13 @@ export interface paths { }; }; "/domains": { - /** lists all domains */ + /** lists time zone options */ get: { responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Domain"][]; + "application/json": components["schemas"]["db.PgTimezoneNamesListRow"][]; }; }; /** @description Bad Request */ @@ -871,7 +873,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DomainMap"]; + "application/json": components["schemas"]["service.DomainMap"]; }; }; /** @description Bad Request */ @@ -908,7 +910,7 @@ export interface paths { /** @description OK */ 200: { content: { - 
"application/json": components["schemas"]["EmailAutocompleteResult"][]; + "application/json": components["schemas"]["db.EmailAutocompleteListRow"][]; }; }; /** @description Bad Request */ @@ -946,7 +948,7 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: components["schemas"]["MeasurementCollectionLean"]; + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; }[]; }; }; @@ -978,7 +980,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CalculatedTimeseries"][]; + "application/json": components["schemas"]["db.TimeseriesComputedListForInstrumentRow"][]; }; }; /** @description Bad Request */ @@ -1010,8 +1012,8 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { "application/json": { [key: string]: unknown; @@ -1056,7 +1058,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CalculatedTimeseries"][]; + "application/json": components["schemas"]["dto.CalculatedTimeseries"][]; }; }; /** @description Bad Request */ @@ -1128,9 +1130,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": { - [key: string]: unknown; - }[]; + "application/json": components["schemas"]["service.Healthcheck"]; }; }; }; @@ -1146,10 +1146,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Heartbeat"]; + "application/json": components["schemas"]["service.Heartbeat"]; }; }; }; @@ -1162,7 +1162,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Heartbeat"]; + "application/json": components["schemas"]["service.Heartbeat"]; }; }; }; @@ -1175,7 +1175,7 @@ export interface paths { /** @description OK */ 200: { content: { - 
"application/json": components["schemas"]["Heartbeat"][]; + "application/json": components["schemas"]["service.Heartbeat"][]; }; }; }; @@ -1188,46 +1188,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Home"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; - "/inclinometer_explorer": { - /** list inclinometer timeseries measurements for explorer page */ - post: { - /** @description array of inclinometer instrument uuids */ - requestBody: { - content: { - "application/json": string[]; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": { - [key: string]: components["schemas"]["InclinometerMeasurementCollectionLean"]; - }[]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; + "application/json": components["schemas"]["db.HomeGetRow"]; }; }; /** @description Internal Server Error */ @@ -1246,7 +1207,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": components["schemas"]["db.VInstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -1280,14 +1241,14 @@ export interface paths { /** @description instrument group payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentGroup"]; + "*/*": components["schemas"]["dto.InstrumentGroup"]; }; }; responses: { /** @description Created */ 201: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.InstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -1324,7 +1285,7 @@ export interface paths { /** 
@description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.VInstrumentGroup"]; }; }; /** @description Bad Request */ @@ -1362,14 +1323,14 @@ export interface paths { /** @description instrument group payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentGroup"]; + "*/*": components["schemas"]["dto.InstrumentGroup"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"]; + "application/json": components["schemas"]["db.InstrumentGroupUpdateRow"]; }; }; /** @description Bad Request */ @@ -1408,7 +1369,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -1445,7 +1408,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"][]; + "application/json": components["schemas"]["db.VInstrument"][]; }; }; /** @description Bad Request */ @@ -1568,7 +1531,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -1605,7 +1568,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": { + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; + }; }; }; /** @description Bad Request */ @@ -1629,14 +1594,16 @@ export interface paths { }; }; }; - "/instruments": { - /** lists all instruments */ + "/instruments/count": { + /** gets the total number of non deleted instruments in the system */ get: { responses: { /** @description OK */ 200: { content: { - 
"application/json": components["schemas"]["Instrument"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -1660,14 +1627,26 @@ export interface paths { }; }; }; - "/instruments/count": { - /** gets the total number of non deleted instruments in the system */ + "/instruments/incl/{instrument_id}/measurements": { + /** creates instrument notes */ get: { + parameters: { + query: { + /** @description after time */ + after?: string; + /** @description before time */ + before: string; + }; + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentCount"]; + "application/json": components["schemas"]["db.VInclMeasurement"][]; }; }; /** @description Bad Request */ @@ -1691,16 +1670,10 @@ export interface paths { }; }; }; - "/instruments/ipi/{instrument_id}/measurements": { - /** creates instrument notes */ + "/instruments/incl/{instrument_id}/segments": { + /** gets all incl segments for an instrument */ get: { parameters: { - query: { - /** @description after time */ - after?: string; - /** @description before time */ - before: string; - }; path: { /** @description instrument uuid */ instrument_id: string; @@ -1710,7 +1683,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiMeasurements"][]; + "application/json": components["schemas"]["db.VInclSegment"][]; }; }; /** @description Bad Request */ @@ -1733,21 +1706,29 @@ export interface paths { }; }; }; - }; - "/instruments/ipi/{instrument_id}/segments": { - /** gets all ipi segments for an instrument */ - get: { + /** updates multiple segments for an incl instrument */ + put: { parameters: { + query?: { + /** @description api key */ + key?: string; + }; path: { /** @description instrument uuid */ instrument_id: string; }; }; + /** @description incl instrument segments payload */ 
+ requestBody: { + content: { + "*/*": components["schemas"]["dto.InclSegment"][]; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiSegment"][]; + "application/json": components["schemas"]["dto.InclSegment"][]; }; }; /** @description Bad Request */ @@ -1770,29 +1751,27 @@ export interface paths { }; }; }; - /** updates multiple segments for an ipi instrument */ - put: { + }; + "/instruments/ipi/{instrument_id}/measurements": { + /** creates instrument notes */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; + query: { + /** @description after time */ + after?: string; + /** @description before time */ + before: string; }; path: { /** @description instrument uuid */ instrument_id: string; }; }; - /** @description ipi instrument segments payload */ - requestBody: { - content: { - "*/*": components["schemas"]["IpiSegment"][]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["IpiSegment"][]; + "application/json": components["schemas"]["db.VIpiMeasurement"][]; }; }; /** @description Bad Request */ @@ -1816,14 +1795,65 @@ export interface paths { }; }; }; - "/instruments/notes": { - /** gets all instrument notes */ + "/instruments/ipi/{instrument_id}/segments": { + /** gets all ipi segments for an instrument */ get: { + parameters: { + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["db.VIpiSegment"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": 
components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** updates multiple segments for an ipi instrument */ + put: { + parameters: { + query?: { + /** @description api key */ + key?: string; + }; + path: { + /** @description instrument uuid */ + instrument_id: string; + }; + }; + /** @description ipi instrument segments payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.IpiSegment"][]; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["dto.IpiSegment"][]; }; }; /** @description Bad Request */ @@ -1846,6 +1876,8 @@ export interface paths { }; }; }; + }; + "/instruments/notes": { /** creates instrument notes */ post: { parameters: { @@ -1857,14 +1889,14 @@ export interface paths { /** @description instrument note collection payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentNoteCollection"]; + "*/*": components["schemas"]["dto.InstrumentNoteCollection"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -1901,7 +1933,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentNote"]; + "application/json": components["schemas"]["db.InstrumentNote"]; }; }; /** @description Bad Request */ @@ -1939,14 +1971,14 @@ export interface paths { /** @description instrument note collection payload */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentNote"]; + "*/*": components["schemas"]["dto.InstrumentNote"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": 
components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -1989,7 +2021,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaMeasurements"][]; + "application/json": components["schemas"]["db.VSaaMeasurement"][]; }; }; /** @description Bad Request */ @@ -2026,7 +2058,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaSegment"][]; + "application/json": components["schemas"]["db.VSaaSegment"][]; }; }; /** @description Bad Request */ @@ -2064,14 +2096,14 @@ export interface paths { /** @description saa instrument segments payload */ requestBody: { content: { - "*/*": components["schemas"]["SaaSegment"][]; + "*/*": components["schemas"]["dto.SaaSegment"][]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SaaSegment"][]; + "application/json": components["schemas"]["dto.SaaSegment"][]; }; }; /** @description Bad Request */ @@ -2108,7 +2140,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -2145,7 +2177,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentNote"][]; + "application/json": components["schemas"]["db.InstrumentNote"][]; }; }; /** @description Bad Request */ @@ -2227,7 +2259,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentStatus"][]; + "application/json": components["schemas"]["db.VInstrumentStatus"][]; }; }; /** @description Bad Request */ @@ -2265,7 +2297,7 @@ export interface paths { /** @description instrument status collection paylaod */ requestBody: { content: { - "*/*": 
components["schemas"]["InstrumentStatusCollection"]; + "*/*": components["schemas"]["dto.InstrumentStatusCollection"]; }; }; responses: { @@ -2313,7 +2345,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VInstrumentStatus"][]; }; }; /** @description Bad Request */ @@ -2397,7 +2429,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Submittal"][]; + "application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -2436,7 +2468,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"]; + "application/json": components["schemas"]["db.VTimeseries"]; }; }; /** @description Bad Request */ @@ -2483,7 +2515,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": components["schemas"]["db.MeasurementCollection"][]; }; }; /** @description Bad Request */ @@ -2528,7 +2560,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": { + [key: string]: components["schemas"]["db.MeasurementCollectionLean"][]; + }; }; }; /** @description Bad Request */ @@ -2565,7 +2599,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertSubscription"][]; + "application/json": components["schemas"]["db.AlertProfileSubscription"][]; }; }; /** @description Bad Request */ @@ -2605,7 +2639,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"][]; + "application/json": components["schemas"]["db.AlertListForProfileRow"][]; }; }; /** @description Bad Request */ @@ 
-2647,10 +2681,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Alert"]; + "application/json": components["schemas"]["db.AlertGetRow"]; }; }; /** @description Bad Request */ @@ -2695,7 +2729,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"]; + "application/json": components["schemas"]["db.AlertGetRow"]; }; }; /** @description Bad Request */ @@ -2726,7 +2760,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Profile"]; + "application/json": components["schemas"]["db.VProfile"]; }; }; /** @description Bad Request */ @@ -2763,7 +2797,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"][]; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -2794,7 +2828,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Token"]; + "application/json": components["schemas"]["service.Token"]; }; }; /** @description Bad Request */ @@ -2857,37 +2891,6 @@ export interface paths { }; }; }; - "/opendcs/sites": { - /** lists all instruments, represented as opendcs sites */ - get: { - responses: { - /** @description OK */ - 200: { - content: { - "text/xml": components["schemas"]["Site"][]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "text/xml": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; "/profiles": { /** creates a user profile */ post: { @@ -2895,7 
+2898,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Profile"]; + "application/json": components["schemas"]["db.ProfileCreateRow"]; }; }; /** @description Bad Request */ @@ -2932,7 +2935,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"][]; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -2966,14 +2969,14 @@ export interface paths { /** @description project collection payload */ requestBody: { content: { - "*/*": components["schemas"]["Project"][]; + "*/*": components["schemas"]["dto.Project"][]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["IDSlugName"][]; + "application/json": components["schemas"]["db.ProjectCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -3004,7 +3007,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ProjectCount"]; + "application/json": components["schemas"]["service.ProjectCount"]; }; }; /** @description Bad Request */ @@ -3041,7 +3044,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"]; + "application/json": components["schemas"]["db.VProject"]; }; }; /** @description Bad Request */ @@ -3079,14 +3082,14 @@ export interface paths { /** @description project payload */ requestBody: { content: { - "*/*": components["schemas"]["Project"]; + "*/*": components["schemas"]["dto.Project"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"]; + "application/json": components["schemas"]["db.VProject"]; }; }; /** @description Bad Request */ @@ -3164,7 +3167,7 @@ export interface paths { /** @description OK */ 200: { content: { - 
"application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"][]; }; }; /** @description Bad Request */ @@ -3202,14 +3205,14 @@ export interface paths { /** @description alert config payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["dto.AlertConfig"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** @description Bad Request */ @@ -3248,7 +3251,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** @description Bad Request */ @@ -3288,14 +3291,14 @@ export interface paths { /** @description alert config payload */ requestBody: { content: { - "application/json": components["schemas"]["AlertConfig"]; + "application/json": components["schemas"]["dto.AlertConfig"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"]; }; }; /** @description Bad Request */ @@ -3336,7 +3339,9 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -3373,7 +3378,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.CollectionGroup"][]; }; }; /** @description Bad Request */ @@ -3414,14 +3419,14 @@ export interface paths { /** @description collection group payload */ requestBody: { content: { - "*/*": 
components["schemas"]["CollectionGroup"]; + "*/*": components["schemas"]["dto.CollectionGroup"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroup"][]; + "application/json": components["schemas"]["db.CollectionGroup"][]; }; }; /** @description Bad Request */ @@ -3460,7 +3465,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroupDetails"]; + "application/json": components["schemas"]["db.VCollectionGroupDetails"]; }; }; /** @description Bad Request */ @@ -3500,14 +3505,14 @@ export interface paths { /** @description collection group payload */ requestBody: { content: { - "*/*": components["schemas"]["CollectionGroup"]; + "*/*": components["schemas"]["dto.CollectionGroup"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["CollectionGroup"]; + "application/json": components["schemas"]["db.CollectionGroup"]; }; }; /** @description Bad Request */ @@ -3575,8 +3580,8 @@ export interface paths { }; }; "/projects/{project_id}/collection_groups/{collection_group_id}/timeseries/{timeseries_id}": { - /** adds a timeseries to a collection group */ - post: { + /** updates sort order for collection group timesries */ + put: { parameters: { query?: { /** @description api key */ @@ -3620,8 +3625,8 @@ export interface paths { }; }; }; - /** removes a timeseries from a collection group */ - delete: { + /** adds a timeseries to a collection group */ + post: { parameters: { query?: { /** @description api key */ @@ -3637,8 +3642,8 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { "application/json": { [key: string]: unknown; @@ -3665,21 +3670,29 @@ export interface paths { }; }; }; - }; - "/projects/{project_id}/district_rollup/evaluation_submittals": { - /** lists monthly evaluation statistics for a 
district by project id */ - get: { + /** removes a timeseries from a collection group */ + delete: { parameters: { + query?: { + /** @description api key */ + key?: string; + }; path: { - /** @description project id */ + /** @description project uuid */ project_id: string; + /** @description collection group uuid */ + collection_group_id: string; + /** @description timeseries uuid */ + timeseries_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DistrictRollup"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -3703,8 +3716,8 @@ export interface paths { }; }; }; - "/projects/{project_id}/district_rollup/measurement_submittals": { - /** lists monthly measurement statistics for a district by project id */ + "/projects/{project_id}/district_rollup/evaluation_submittals": { + /** lists monthly evaluation statistics for a district by project id */ get: { parameters: { path: { @@ -3716,7 +3729,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["DistrictRollup"][]; + "application/json": components["schemas"]["db.VDistrictRollup"][]; }; }; /** @description Bad Request */ @@ -3740,12 +3753,12 @@ export interface paths { }; }; }; - "/projects/{project_id}/evaluations": { - /** lists evaluations for a single project optionally filtered by alert_config_id */ + "/projects/{project_id}/district_rollup/measurement_submittals": { + /** lists monthly measurement statistics for a district by project id */ get: { parameters: { path: { - /** @description project uuid */ + /** @description project id */ project_id: string; }; }; @@ -3753,7 +3766,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"][]; + "application/json": components["schemas"]["db.VDistrictRollup"][]; }; }; /** @description Bad Request */ @@ -3776,29 +3789,21 @@ 
export interface paths { }; }; }; - /** creates one evaluation */ - post: { + }; + "/projects/{project_id}/evaluations": { + /** lists evaluations for a single project optionally filtered by alert_config_id */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; - }; path: { /** @description project uuid */ project_id: string; }; }; - /** @description evaluation payload */ - requestBody: { - content: { - "*/*": components["schemas"]["Evaluation"]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"][]; }; }; /** @description Bad Request */ @@ -3821,23 +3826,29 @@ export interface paths { }; }; }; - }; - "/projects/{project_id}/evaluations/{evaluation_id}": { - /** gets a single evaluation by id */ - get: { + /** creates one evaluation */ + post: { parameters: { + query?: { + /** @description api key */ + key?: string; + }; path: { /** @description project uuid */ project_id: string; - /** @description evaluation uuid */ - evaluation_id: string; + }; + }; + /** @description evaluation payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.Evaluation"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3860,13 +3871,11 @@ export interface paths { }; }; }; - /** updates an existing evaluation */ - put: { + }; + "/projects/{project_id}/evaluations/{evaluation_id}": { + /** gets a single evaluation by id */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; - }; path: { /** @description project uuid */ project_id: string; @@ -3874,17 +3883,11 @@ export interface paths { evaluation_id: string; }; }; - /** @description evaluation payload */ - 
requestBody: { - content: { - "*/*": components["schemas"]["Evaluation"]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3907,8 +3910,8 @@ export interface paths { }; }; }; - /** deletes an evaluation */ - delete: { + /** updates an existing evaluation */ + put: { parameters: { query?: { /** @description api key */ @@ -3921,11 +3924,17 @@ export interface paths { evaluation_id: string; }; }; + /** @description evaluation payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.Evaluation"]; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VEvaluation"]; }; }; /** @description Bad Request */ @@ -3948,18 +3957,18 @@ export interface paths { }; }; }; - }; - "/projects/{project_id}/images": { - /** uploades a picture for a project */ - post: { + /** deletes an evaluation */ + delete: { parameters: { query?: { /** @description api key */ key?: string; }; path: { - /** @description project id */ + /** @description project uuid */ project_id: string; + /** @description evaluation uuid */ + evaluation_id: string; }; }; responses: { @@ -3967,8 +3976,8 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: unknown; - }; + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -3992,8 +4001,8 @@ export interface paths { }; }; }; - "/projects/{project_id}/inclinometer_measurements": { - /** creates or updates one or more inclinometer measurements */ + "/projects/{project_id}/images": { + /** uploades a picture for a project */ post: { parameters: { query?: { @@ -4001,21 +4010,17 @@ export interface paths { key?: string; }; path: { - /** @description project uuid */ + /** @description project id */ project_id: 
string; }; }; - /** @description inclinometer measurement collections */ - requestBody: { - content: { - "*/*": components["schemas"]["InclinometerMeasurementCollectionCollection"]; - }; - }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InclinometerMeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -4052,7 +4057,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentGroup"][]; + "application/json": components["schemas"]["db.VInstrumentGroup"][]; }; }; /** @description Bad Request */ @@ -4089,7 +4094,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Project"][]; + "application/json": components["schemas"]["db.VInstrument"][]; }; }; /** @description Bad Request */ @@ -4129,14 +4134,14 @@ export interface paths { /** @description instrument collection payload */ requestBody: { content: { - "*/*": components["schemas"]["Instrument"][]; + "*/*": components["schemas"]["dto.Instrument"][]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["IDSlugName"][]; + "application/json": components["schemas"]["db.InstrumentCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -4181,14 +4186,14 @@ export interface paths { /** @description instrument uuids */ requestBody: { content: { - "*/*": components["schemas"]["ProjectInstrumentAssignments"]; + "*/*": components["schemas"]["dto.ProjectInstrumentAssignments"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4230,14 +4235,14 @@ export interface paths { /** @description 
instrument payload */ requestBody: { content: { - "*/*": components["schemas"]["Instrument"]; + "*/*": components["schemas"]["dto.Instrument"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -4319,7 +4324,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AlertConfig"][]; + "application/json": components["schemas"]["db.VAlertConfig"][]; }; }; /** @description Bad Request */ @@ -4361,10 +4366,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["AlertSubscription"]; + "application/json": components["schemas"]["db.AlertProfileSubscription"]; }; }; /** @description Bad Request */ @@ -4453,7 +4458,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Alert"][]; + "application/json": components["schemas"]["db.VAlert"][]; }; }; /** @description Bad Request */ @@ -4500,14 +4505,14 @@ export interface paths { /** @description project uuids */ requestBody: { content: { - "*/*": components["schemas"]["InstrumentProjectAssignments"]; + "*/*": components["schemas"]["dto.InstrumentProjectAssignments"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4548,10 +4553,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** 
@description Bad Request */ @@ -4597,7 +4602,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InstrumentsValidation"]; + "application/json": components["schemas"]["service.InstrumentsValidation"]; }; }; /** @description Bad Request */ @@ -4636,7 +4641,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -4676,14 +4681,14 @@ export interface paths { /** @description timeseries collection items payload */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCollectionItems"]; + "*/*": components["schemas"]["dto.TimeseriesCollectionItems"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.TimeseriesCreateBatchRow"][]; }; }; /** @description Bad Request */ @@ -4769,7 +4774,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Evaluation"][]; + "application/json": components["schemas"]["dto.Evaluation"][]; }; }; /** @description Bad Request */ @@ -4811,14 +4816,14 @@ export interface paths { /** @description instrument payload */ requestBody: { content: { - "*/*": components["schemas"]["Instrument"]; + "*/*": components["schemas"]["dto.Instrument"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Instrument"]; + "application/json": components["schemas"]["db.VInstrument"]; }; }; /** @description Bad Request */ @@ -4857,7 +4862,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -4896,7 
+4901,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": components["schemas"]["db.VTimeseriesCwms"][]; }; }; /** @description Bad Request */ @@ -4932,14 +4937,16 @@ export interface paths { /** @description array of cwms timeseries to create */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCwms"][]; + "*/*": components["schemas"]["dto.TimeseriesCwms"][]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -4979,14 +4986,16 @@ export interface paths { /** @description cwms timeseries to update */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCwms"]; + "*/*": components["schemas"]["dto.TimeseriesCwms"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["TimeseriesCwms"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -5027,7 +5036,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ProjectMembership"][]; + "application/json": components["schemas"]["db.ProfileProjectRoleListForProjectRow"][]; }; }; /** @description Bad Request */ @@ -5069,10 +5078,10 @@ export interface paths { }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["ProjectMembership"]; + "application/json": components["schemas"]["db.ProfileProjectRoleGetRow"]; }; }; /** @description Bad Request */ @@ -5154,7 +5163,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"][]; + "application/json": 
components["schemas"]["db.VPlotConfiguration"][]; }; }; /** @description Bad Request */ @@ -5194,14 +5203,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigBullseyePlot"]; + "*/*": components["schemas"]["dto.PlotConfigBullseyePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5243,14 +5252,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigBullseyePlot"]; + "*/*": components["schemas"]["dto.PlotConfigBullseyePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5293,7 +5302,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfigMeasurementBullseyePlot"][]; + "application/json": components["schemas"]["db.PlotConfigMeasurementListBullseyeRow"][]; }; }; /** @description Bad Request */ @@ -5333,14 +5342,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigContourPlot"]; + "*/*": components["schemas"]["dto.PlotConfigContourPlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5382,14 +5391,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": 
components["schemas"]["PlotConfigContourPlot"]; + "*/*": components["schemas"]["dto.PlotConfigContourPlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5434,7 +5443,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["AggregatePlotConfigMeasurementsContourPlot"]; + "application/json": components["schemas"]["service.AggregatePlotConfigMeasurementsContourPlot"]; }; }; /** @description Bad Request */ @@ -5521,14 +5530,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigProfilePlot"]; + "*/*": components["schemas"]["dto.PlotConfigProfilePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5570,14 +5579,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigProfilePlot"]; + "*/*": components["schemas"]["dto.PlotConfigProfilePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5617,14 +5626,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": 
components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5666,14 +5675,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5712,7 +5721,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5792,7 +5801,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"][]; + "application/json": components["schemas"]["db.VPlotConfiguration"][]; }; }; /** @description Bad Request */ @@ -5830,14 +5839,14 @@ export interface paths { /** @description plot config payload */ requestBody: { content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5876,7 +5885,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -5916,14 +5925,14 @@ export interface paths { /** @description plot config payload */ requestBody: { 
content: { - "*/*": components["schemas"]["PlotConfigScatterLinePlot"]; + "*/*": components["schemas"]["dto.PlotConfigScatterLinePlot"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["PlotConfig"]; + "application/json": components["schemas"]["db.VPlotConfiguration"]; }; }; /** @description Bad Request */ @@ -6007,7 +6016,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["db.VReportConfig"]; }; }; /** @description Bad Request */ @@ -6045,14 +6054,14 @@ export interface paths { /** @description report config payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["dto.ReportConfig"]; }; }; responses: { /** @description Created */ 201: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["db.VReportConfig"]; }; }; /** @description Bad Request */ @@ -6094,7 +6103,7 @@ export interface paths { /** @description report config payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportConfig"]; + "application/json": components["schemas"]["dto.ReportConfig"]; }; }; responses: { @@ -6189,7 +6198,7 @@ export interface paths { /** @description Created */ 201: { content: { - "application/json": components["schemas"]["ReportDownloadJob"]; + "application/json": components["schemas"]["db.ReportDownloadJob"]; }; }; /** @description Bad Request */ @@ -6234,7 +6243,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["ReportDownloadJob"]; + "application/json": components["schemas"]["db.ReportDownloadJob"]; }; }; /** @description Bad Request */ @@ -6316,7 +6325,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": 
components["schemas"]["Submittal"][]; + "application/json": components["schemas"]["db.VSubmittal"][]; }; }; /** @description Bad Request */ @@ -6353,7 +6362,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"][]; + "application/json": components["schemas"]["db.VTimeseries"][]; }; }; /** @description Bad Request */ @@ -6397,14 +6406,16 @@ export interface paths { /** @description array of timeseries measurement collections */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "*/*": components["schemas"]["dto.TimeseriesMeasurementCollectionCollection"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }[]; }; }; /** @description Bad Request */ @@ -6442,14 +6453,16 @@ export interface paths { /** @description array of timeseries measurement collections */ requestBody: { content: { - "application/json": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + "application/json": components["schemas"]["dto.TimeseriesMeasurementCollectionCollection"]; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6473,62 +6486,51 @@ export interface paths { }; }; }; - "/projects/{project_slug}/images/{uri_path}": { - /** serves media, files, etc for a given project */ + "/projects/{project_id}/uploader_configs": { + /** lists uploader configs for a project */ get: { parameters: { path: { - /** @description project abbr */ - project_slug: string; - /** @description uri path of requested resource */ - uri_path: string; + /** @description project uuid */ + project_id: string; }; }; responses: { /** @description OK */ 200: { content: { + 
"application/json": components["schemas"]["db.VUploaderConfig"][]; }; }; /** @description Bad Request */ 400: { content: { - "image/jpeg": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "image/jpeg": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "image/jpeg": components["schemas"]["echo.HTTPError"]; + "application/json": components["schemas"]["echo.HTTPError"]; }; }; }; }; - }; - "/report_configs/{report_config_id}/plot_configs": { - /** Lists all plot configs for a report config */ - get: { + /** creates an uploader config */ + post: { parameters: { - query: { - /** @description api key */ - key: string; - }; path: { - /** @description report config uuid */ - report_config_id: string; + /** @description project uuid */ + project_id: string; + }; + }; + /** @description uploader config payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.UploaderConfig"]; }; }; responses: { - /** @description OK */ - 200: { + /** @description Created */ + 201: { content: { - "application/json": components["schemas"]["ReportConfigWithPlotConfigs"]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6537,38 +6539,24 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; }; }; }; - "/report_jobs/{job_id}": { - /** updates a job that creates a pdf report */ + "/projects/{project_id}/uploader_configs/{uploader_config_id}": { + /** updates an uploader config */ put: { parameters: { - query: { - /** @description api key */ - key: string; - }; path: { - /** @description download job uuid */ 
- job_id: string; + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; }; }; - /** @description report download job payload */ + /** @description uploader config payload */ requestBody: { content: { - "application/json": components["schemas"]["ReportDownloadJob"]; + "*/*": components["schemas"]["dto.UploaderConfig"]; }; }; responses: { @@ -6586,39 +6574,25 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; }; }; - }; - "/search/{entity}": { - /** allows searching using a string on different entities */ - get: { + /** deletes an uploader config */ + delete: { parameters: { - query?: { - /** @description search string */ - q?: string; - }; path: { - /** @description entity to search (i.e. projects, etc.) 
*/ - entity: string; + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["SearchResult"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6627,41 +6601,25 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; }; }; }; - "/submittals/{submittal_id}/verify_missing": { - /** verifies the specified submittal is "missing" and will not be completed */ - put: { + "/projects/{project_id}/uploader_configs/{uploader_config_id}/mappings": { + /** lists timeseries mappings for an uploader config */ + get: { parameters: { - query?: { - /** @description api key */ - key?: string; - }; path: { - /** @description submittal uuid */ - submittal_id: string; + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": { - [key: string]: unknown; - }; + "application/json": components["schemas"]["db.UploaderConfigMapping"][]; }; }; /** @description Bad Request */ @@ -6670,34 +6628,82 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; - /** @description Not Found */ - 404: { + }; + }; + /** updates mappings for an uploader config */ + put: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; + }; + }; + /** @description uploader config mappings payload */ + 
requestBody: { + content: { + "*/*": components["schemas"]["dto.UploaderConfigMapping"][]; + }; + }; + responses: { + /** @description OK */ + 200: { content: { - "application/json": components["schemas"]["echo.HTTPError"]; + "application/json": { + [key: string]: unknown; + }; }; }; - /** @description Internal Server Error */ - 500: { + /** @description Bad Request */ + 400: { content: { "application/json": components["schemas"]["echo.HTTPError"]; }; }; }; }; - }; - "/timeseries": { - /** creates one or more timeseries */ + /** creates mappings for an uploader config */ post: { parameters: { - query?: { - /** @description api key */ - key?: string; + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; }; }; - /** @description timeseries collection items payload */ + /** @description uploader config mappings payload */ requestBody: { content: { - "*/*": components["schemas"]["TimeseriesCollectionItems"]; + "*/*": components["schemas"]["dto.UploaderConfigMapping"][]; + }; + }; + responses: { + /** @description Created */ + 201: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** updates mappings for an uploader config */ + delete: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description uploader config uuid */ + uploader_config_id: string; }; }; responses: { @@ -6705,8 +6711,8 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: string; - }[]; + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6715,35 +6721,66 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; + }; + }; + }; + "/projects/{project_slug}/images/{uri_path}": { + /** serves media, files, etc for a given 
project */ + get: { + parameters: { + path: { + /** @description project abbr */ + project_slug: string; + /** @description uri path of requested resource */ + uri_path: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "image/jpeg": string; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "image/jpeg": components["schemas"]["echo.HTTPError"]; + }; + }; /** @description Not Found */ 404: { content: { - "application/json": components["schemas"]["echo.HTTPError"]; + "image/jpeg": components["schemas"]["echo.HTTPError"]; }; }; /** @description Internal Server Error */ 500: { content: { - "application/json": components["schemas"]["echo.HTTPError"]; + "image/jpeg": components["schemas"]["echo.HTTPError"]; }; }; }; }; }; - "/timeseries/{timeseries_id}": { - /** gets a single timeseries by id */ + "/report_configs/{report_config_id}/plot_configs": { + /** Lists all plot configs for a report config */ get: { parameters: { + query: { + /** @description api key */ + key: string; + }; path: { - /** @description timeseries uuid */ - timeseries_id: string; + /** @description report config uuid */ + report_config_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Timeseries"]; + "application/json": components["schemas"]["service.ReportConfigWithPlotConfigs"]; }; }; /** @description Bad Request */ @@ -6766,22 +6803,24 @@ export interface paths { }; }; }; - /** updates a single timeseries by id */ + }; + "/report_jobs/{job_id}": { + /** updates a job that creates a pdf report */ put: { parameters: { - query?: { + query: { /** @description api key */ - key?: string; + key: string; }; path: { - /** @description timeseries uuid */ - timeseries_id: string; + /** @description download job uuid */ + job_id: string; }; }; - /** @description timeseries payload */ + /** @description report download job payload */ requestBody: { content: { - "*/*": 
components["schemas"]["Timeseries"]; + "application/json": components["schemas"]["dto.ReportDownloadJob"]; }; }; responses: { @@ -6789,7 +6828,7 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: string; + [key: string]: unknown; }; }; }; @@ -6813,25 +6852,25 @@ export interface paths { }; }; }; - /** deletes a single timeseries by id */ - delete: { + }; + "/search/projects": { + /** allows searching using a string on different entities */ + get: { parameters: { query?: { - /** @description api key */ - key?: string; + /** @description search string */ + q?: string; }; path: { - /** @description timeseries uuid */ - timeseries_id: string; + /** @description entity to search (i.e. projects, etc.) */ + entity: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": { - [key: string]: unknown; - }; + "application/json": components["schemas"]["db.VProject"][]; }; }; /** @description Bad Request */ @@ -6855,26 +6894,26 @@ export interface paths { }; }; }; - "/timeseries/{timeseries_id}/inclinometer_measurements": { - /** lists all measurements for an inclinometer */ - get: { + "/submittals/{submittal_id}/verify_missing": { + /** verifies the specified submittal is "missing" and will not be completed */ + put: { parameters: { query?: { - /** @description after timestamp */ - after?: string; - /** @description before timestamp */ - before?: string; + /** @description api key */ + key?: string; }; path: { - /** @description timeseries uuid */ - timeseries_id: string; + /** @description submittal uuid */ + submittal_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["InclinometerMeasurementCollection"]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -6897,18 +6936,20 @@ export interface paths { }; }; }; - /** deletes a single inclinometer measurement by timestamp */ - delete: { + }; + 
"/timeseries": { + /** creates one or more timeseries */ + post: { parameters: { - query: { - /** @description timestamp of measurement to delete */ - time: string; + query?: { /** @description api key */ key?: string; }; - path: { - /** @description timeseries uuid */ - timeseries_id: string; + }; + /** @description timeseries collection items payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.TimeseriesCollectionItems"]; }; }; responses: { @@ -6916,7 +6957,7 @@ export interface paths { 200: { content: { "application/json": { - [key: string]: unknown; + [key: string]: string; }; }; }; @@ -6941,18 +6982,10 @@ export interface paths { }; }; }; - "/timeseries/{timeseries_id}/measurements": { - /** lists timeseries by timeseries uuid */ + "/timeseries/{timeseries_id}": { + /** gets a single timeseries by id */ get: { parameters: { - query?: { - /** @description after time */ - after?: string; - /** @description before time */ - before?: string; - /** @description downsample threshold */ - threshold?: number; - }; path: { /** @description timeseries uuid */ timeseries_id: string; @@ -6962,7 +6995,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"]; + "application/json": components["schemas"]["db.VTimeseries"]; }; }; /** @description Bad Request */ @@ -6985,12 +7018,10 @@ export interface paths { }; }; }; - /** deletes a single timeseries measurement by timestamp */ - delete: { + /** updates a single timeseries by id */ + put: { parameters: { - query: { - /** @description timestamp of measurement to delete */ - time: string; + query?: { /** @description api key */ key?: string; }; @@ -6999,13 +7030,17 @@ export interface paths { timeseries_id: string; }; }; + /** @description timeseries payload */ + requestBody: { + content: { + "*/*": components["schemas"]["dto.Timeseries"]; + }; + }; responses: { /** @description OK */ 200: { content: { - 
"application/json": { - [key: string]: unknown; - }; + "application/json": components["schemas"]["dto.Timeseries"]; }; }; /** @description Bad Request */ @@ -7028,27 +7063,25 @@ export interface paths { }; }; }; - }; - "/timeseries_measurements": { - /** creates or updates one or more timeseries measurements */ - post: { + /** deletes a single timeseries by id */ + delete: { parameters: { - query: { + query?: { /** @description api key */ - key: string; + key?: string; }; - }; - /** @description array of timeseries measurement collections */ - requestBody: { - content: { - "*/*": components["schemas"]["TimeseriesMeasurementCollectionCollection"]; + path: { + /** @description timeseries uuid */ + timeseries_id: string; }; }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["MeasurementCollection"][]; + "application/json": { + [key: string]: unknown; + }; }; }; /** @description Bad Request */ @@ -7072,14 +7105,28 @@ export interface paths { }; }; }; - "/units": { - /** lists the available units */ + "/timeseries/{timeseries_id}/measurements": { + /** lists timeseries by timeseries uuid */ get: { + parameters: { + query?: { + /** @description after time */ + after?: string; + /** @description before time */ + before?: string; + /** @description downsample threshold */ + threshold?: number; + }; + path: { + /** @description timeseries uuid */ + timeseries_id: string; + }; + }; responses: { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["Unit"][]; + "application/json": components["schemas"]["db.MeasurementCollection"][]; }; }; /** @description Bad Request */ @@ -7088,58 +7135,135 @@ export interface paths { "application/json": components["schemas"]["echo.HTTPError"]; }; }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": 
components["schemas"]["echo.HTTPError"]; + }; + }; }; }; - }; -} - -export type webhooks = Record; - -export interface components { - schemas: { - /** - * @example { - * "message": "{}" - * } - */ - "echo.HTTPError": { - message?: Record; + /** deletes a single timeseries measurement by timestamp */ + delete: { + parameters: { + query: { + /** @description timestamp of measurement to delete */ + time: string; + /** @description api key */ + key?: string; + }; + path: { + /** @description timeseries uuid */ + timeseries_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; }; - /** - * @example { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - */ - "geojson.Geometry": { - coordinates?: Record; - geometries?: components["schemas"]["geojson.Geometry"][]; - type?: string; + }; + "/timeseries_measurements": { + /** creates one or more timeseries measurements */ + post: { + requestBody?: { + content: { + "application/json": components["schemas"]["_timeseries_measurements_post_request"]; + "multipart/form-data": components["schemas"]["_timeseries_measurements_post_request"]; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: unknown; + }[]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": 
components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; + "/units": { + /** lists the available units */ + get: { + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["db.VUnit"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; }; + }; +} + +export type webhooks = Record; + +export interface components { + schemas: { /** * @example { - * "x": [ - * 0.8008281904610115, - * 0.8008281904610115 - * ], - * "y": [ - * 6.027456183070403, - * 6.027456183070403 - * ], - * "z": [ - * 1.4658129805029452, - * 1.4658129805029452 - * ] + * "message": "{}" * } */ - AggregatePlotConfigMeasurementsContourPlot: { - x?: number[]; - y?: number[]; - z?: number[]; + "echo.HTTPError": { + message?: Record; }; /** * @example { @@ -7157,18 +7281,18 @@ export interface components { * "alert_config_id": "alert_config_id", * "project_id": "project_id", * "name": "name", + * "created_at": "created_at", * "id": "id", * "body": "body", - * "create_date": "create_date", * "project_name": "project_name" * } */ - Alert: { + "db.AlertGetRow": { alert_config_id?: string; body?: string; - create_date?: string; + created_at?: string; id?: string; - instruments?: components["schemas"]["AlertConfigInstrument"][]; + instruments?: components["schemas"]["db.InstrumentIDName"][]; name?: string; project_id?: string; project_name?: string; @@ -7176,14 +7300,6 @@ export interface components { }; /** * @example { - * "updater_username": "updater_username", - * "alert_type_id": "alert_type_id", - * "creator_username": "creator_username", - * "remind_interval": "remind_interval", - * "body": "body", - * "project_name": "project_name", - * "alert_type": "alert_type", - * "update_date": "update_date", * "instruments": [ * 
{ * "instrument_name": "instrument_name", @@ -7194,67 +7310,26 @@ export interface components { * "instrument_id": "instrument_id" * } * ], + * "read": true, + * "alert_config_id": "alert_config_id", * "project_id": "project_id", - * "last_checked": "last_checked", - * "mute_consecutive_alerts": true, - * "creator_id": "creator_id", - * "last_reminded": "last_reminded", * "name": "name", - * "updater_id": "updater_id", - * "schedule_interval": "schedule_interval", + * "created_at": "created_at", * "id": "id", - * "alert_email_subscriptions": [ - * { - * "user_type": "user_type", - * "id": "id", - * "email": "email", - * "username": "username" - * }, - * { - * "user_type": "user_type", - * "id": "id", - * "email": "email", - * "username": "username" - * } - * ], - * "create_date": "create_date", - * "warning_interval": "warning_interval", - * "start_date": "start_date" + * "body": "body", + * "project_name": "project_name" * } */ - AlertConfig: { - alert_email_subscriptions?: components["schemas"]["EmailAutocompleteResult"][]; - alert_type?: string; - alert_type_id?: string; + "db.AlertListForProfileRow": { + alert_config_id?: string; body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; + created_at?: string; id?: string; - instruments?: components["schemas"]["AlertConfigInstrument"][]; - last_checked?: string; - last_reminded?: string; - mute_consecutive_alerts?: boolean; + instruments?: components["schemas"]["db.InstrumentIDName"][]; name?: string; project_id?: string; project_name?: string; - remind_interval?: string; - schedule_interval?: string; - start_date?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - warning_interval?: string; - }; - /** - * @example { - * "instrument_name": "instrument_name", - * "instrument_id": "instrument_id" - * } - */ - AlertConfigInstrument: { - instrument_id?: string; - instrument_name?: string; + read?: boolean; }; /** * @example { @@ -7265,7 +7340,7 @@ 
export interface components { * "mute_notify": true * } */ - AlertSubscription: { + "db.AlertProfileSubscription": { alert_config_id?: string; id?: string; mute_notify?: boolean; @@ -7280,7 +7355,7 @@ export interface components { * "parameter_id": "parameter_id" * } */ - AwareParameter: { + "db.AwareParameterListRow": { id?: string; key?: string; parameter_id?: string; @@ -7288,1069 +7363,970 @@ export interface components { }; /** * @example { - * "aware_parameters": { - * "key": "aware_parameters" - * }, - * "instrument_id": "instrument_id", - * "aware_id": "aware_id" + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "id": "id", + * "created_by": "created_by", + * "sort_order": 0, + * "slug": "slug" * } */ - AwarePlatformParameterConfig: { - aware_id?: string; - aware_parameters?: { - [key: string]: string; - }; - instrument_id?: string; + "db.CollectionGroup": { + created_at?: string; + created_by?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "formula_name": "formula_name", - * "formula": "formula", - * "id": "id", + * "instrument": "instrument", + * "type": "standard", * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "latest_time": "latest_time", + * "sort_order": 1, * "unit_id": "unit_id", * "slug": "slug", * "parameter_id": "parameter_id" * } */ - CalculatedTimeseries: { - formula?: string; - formula_name?: string; + "db.CollectionGroupDetailsTimeseries": { id?: string; + instrument?: string; instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; + latest_time?: string; + latest_value?: number; + name?: 
string; + parameter?: string; parameter_id?: string; slug?: string; + sort_order?: number; + type?: components["schemas"]["db.TimeseriesType"]; + unit?: string; unit_id?: string; + variable?: Record; }; /** * @example { - * "updater_username": "updater_username", - * "project_id": "project_id", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", + * "timeseries_id": "timeseries_id", * "id": "id", - * "create_date": "create_date", - * "slug": "slug", - * "update_date": "update_date" + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * } */ - CollectionGroup: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.DataloggerEquivalencyTableField": { + display_name?: string; + field_name?: string; id?: string; - name?: string; - project_id?: string; - slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + instrument_id?: string; + timeseries_id?: string; }; /** * @example { - * "updater_username": "updater_username", - * "timeseries": [ - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "latest_value": 0.8008281904610115, - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "latest_time": "latest_time", - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * }, - * { - * "values": [ - * { - * "annotation": "annotation", - * 
"validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "latest_value": 0.8008281904610115, - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "latest_time": "latest_time", - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * } - * ], - * "project_id": "project_id", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", * "id": "id", - * "create_date": "create_date", - * "slug": "slug", - * "update_date": "update_date" + * "table_name": "table_name" * } */ - CollectionGroupDetails: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.DataloggerTableIDName": { id?: string; - name?: string; - project_id?: string; - slug?: string; - timeseries?: components["schemas"]["collectionGroupDetailsTimeseries"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; + table_name?: string; + }; + "db.DomainGroupOpt": { + description?: string; + id?: string; + value?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "model_id": "model_id", - * "update_date": "update_date", - * "tables": [ - * { - * "id": "id", - * "table_name": "table_name" - * }, - * { - * "id": "id", - * "table_name": "table_name" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "model": "model", + * "user_type": "user_type", * "id": "id", - * "sn": 
"sn", - * "create_date": "create_date", - * "errors": [ - * "errors", - * "errors" - * ], - * "slug": "slug" + * "email": "email", + * "username": "{}" * } */ - Datalogger: { - create_date?: string; - creator_id?: string; - creator_username?: string; - errors?: string[]; + "db.EmailAutocompleteListRow": { + email?: string; id?: string; - model?: string; - model_id?: string; - name?: string; - project_id?: string; - slug?: string; - sn?: string; - tables?: components["schemas"]["DataloggerTable"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; + user_type?: string; + username?: Record; }; /** * @example { + * "user_type": "user_type", * "id": "id", - * "table_name": "table_name" + * "email": "email", + * "username": "username" * } */ - DataloggerTable: { + "db.EmailAutocompleteResult": { + email?: string; id?: string; - table_name?: string; + user_type?: string; + username?: string; }; /** * @example { - * "preview": { - * "bytes": [ - * 0, - * 0 - * ], - * "status": 6 - * }, - * "datalogger_table_id": "datalogger_table_id", - * "update_date": "update_date" + * "new_instruments_7d": 1, + * "project_count": 5, + * "instrument_group_count": 6, + * "new_measurements_2h": 5, + * "instrument_count": 0 * } */ - DataloggerTablePreview: { - datalogger_table_id?: string; - preview?: components["schemas"]["pgtype.JSON"]; - update_date?: string; + "db.HomeGetRow": { + instrument_count?: number; + instrument_group_count?: number; + new_instruments_7d?: number; + new_measurements_2h?: number; + project_count?: number; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "model_id": "model_id", - * "update_date": "update_date", - * "tables": [ - * { - * "id": "id", - * "table_name": "table_name" - * }, - * { - * "id": "id", - * "table_name": "table_name" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", - * "model": 
"model", * "id": "id", - * "sn": "sn", - * "create_date": "create_date", - * "errors": [ - * "errors", - * "errors" - * ], - * "key": "key", * "slug": "slug" * } */ - DataloggerWithKey: { - create_date?: string; - creator_id?: string; - creator_username?: string; - errors?: string[]; + "db.IDSlugName": { id?: string; - key?: string; - model?: string; - model_id?: string; name?: string; - project_id?: string; slug?: string; - sn?: string; - tables?: components["schemas"]["DataloggerTable"][]; - update_date?: string; - updater_id?: string; - updater_username?: string; }; /** * @example { - * "office_id": "office_id", - * "agency": "agency", - * "initials": "initials", - * "division_initials": "division_initials", - * "division_name": "division_name", - * "name": "name", - * "id": "id" + * "id": "id", + * "slug": "slug" * } */ - District: { - agency?: string; - division_initials?: string; - division_name?: string; + "db.InstrumentCreateBatchRow": { id?: string; - initials?: string; - name?: string; - office_id?: string; + slug?: string; }; /** * @example { - * "expected_total_submittals": 6, - * "office_id": "office_id", - * "alert_type_id": "alert_type_id", - * "month": "month", + * "deleted": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "red_submittals": 5, - * "green_submittals": 1, - * "yellow_submittals": 5, - * "actual_total_submittals": 0, - * "district_initials": "district_initials", - * "project_name": "project_name" + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "description": "description", + * "id": "id", + * "created_by": "created_by", + * "slug": "slug" * } */ - DistrictRollup: { - actual_total_submittals?: number; - alert_type_id?: string; - district_initials?: string; - expected_total_submittals?: number; - green_submittals?: number; - month?: string; - office_id?: string; + "db.InstrumentGroup": { + created_at?: string; + created_by?: string; + deleted?: boolean; + description?: 
string; + id?: string; + name?: string; project_id?: string; - project_name?: string; - red_submittals?: number; - yellow_submittals?: number; + slug?: string; + updated_at?: string; + updated_by?: string; }; /** * @example { + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", * "description": "description", * "id": "id", - * "value": "value", - * "group": "group" + * "created_by": "created_by", + * "slug": "slug" * } */ - Domain: { - description?: string; - group?: string; - id?: string; - value?: string; - }; - DomainGroupOption: { + "db.InstrumentGroupUpdateRow": { + created_at?: string; + created_by?: string; description?: string; id?: string; - value?: string; + name?: string; + project_id?: string; + slug?: string; + updated_at?: string; + updated_by?: string; }; - DomainMap: { - [key: string]: components["schemas"]["DomainGroupOption"][]; + /** + * @example { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + */ + "db.InstrumentIDName": { + instrument_id?: string; + instrument_name?: string; }; /** * @example { - * "user_type": "user_type", + * "updated_at": "updated_at", + * "updated_by": "updated_by", + * "created_at": "created_at", * "id": "id", - * "email": "email", - * "username": "username" + * "time": "time", + * "body": "body", + * "title": "title", + * "created_by": "created_by", + * "instrument_id": "instrument_id" * } */ - EmailAutocompleteResult: { - email?: string; + "db.InstrumentNote": { + body?: string; + created_at?: string; + created_by?: string; id?: string; - user_type?: string; - username?: string; + instrument_id?: string; + time?: string; + title?: string; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "datalogger_table_id": "datalogger_table_id", - * "datalogger_table_name": "datalogger_table_name", - * "rows": [ - * { - * "timeseries_id": "timeseries_id", - * "id": "id", - * 
"display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" - * }, - * { - * "timeseries_id": "timeseries_id", - * "id": "id", - * "display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" - * } - * ], - * "datalogger_id": "datalogger_id" + * "elevation": 6.027456183070403, + * "temp": 5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * } */ - EquivalencyTable: { - datalogger_id?: string; - datalogger_table_id?: string; - datalogger_table_name?: string; - rows?: components["schemas"]["EquivalencyTableRow"][]; + "db.IpiMeasurement": { + cum_dev?: number; + elevation?: number; + inc_dev?: number; + segment_id?: number; + temp?: number; + tilt?: number; }; + /** @enum {string} */ + "db.JobStatus": "SUCCESS" | "FAIL" | "INIT"; /** * @example { - * "timeseries_id": "timeseries_id", - * "id": "id", - * "display_name": "display_name", - * "instrument_id": "instrument_id", - * "field_name": "field_name" + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 * } */ - EquivalencyTableRow: { - display_name?: string; - field_name?: string; - id?: string; - instrument_id?: string; - timeseries_id?: string; + "db.Measurement": { + annotation?: string; + error?: string; + masked?: boolean; + time?: string; + validated?: boolean; + value?: number; }; /** * @example { - * "end_date": "end_date", - * "updater_username": "updater_username", - * "alert_config_id": "alert_config_id", - * "creator_username": "creator_username", - * "alert_config_name": "alert_config_name", - * "body": "body", - * "project_name": "project_name", - * "submittal_id": "submittal_id", - * "update_date": "update_date", - * "instruments": [ + * "timeseries_id": "timeseries_id", + * "items": [ * { - * "instrument_name": "instrument_name", - * 
"instrument_id": "instrument_id" + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 * }, * { - * "instrument_name": "instrument_name", - * "instrument_id": "instrument_id" - * } - * ], - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "start_date": "start_date" + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + * ] * } */ - Evaluation: { - alert_config_id?: string; - alert_config_name?: string; - body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; - end_date?: string; - id?: string; - instruments?: components["schemas"]["EvaluationInstrument"][]; - name?: string; - project_id?: string; - project_name?: string; - start_date?: string; - submittal_id?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + "db.MeasurementCollection": { + items?: components["schemas"]["db.Measurement"][]; + timeseries_id?: string; }; /** * @example { - * "instrument_name": "instrument_name", - * "instrument_id": "instrument_id" + * "timeseries_id": "timeseries_id", + * "items": [ + * null, + * null + * ] * } */ - EvaluationInstrument: { - instrument_id?: string; - instrument_name?: string; + "db.MeasurementCollectionLean": { + items?: components["schemas"]["db.MeasurementLean"][]; + timeseries_id?: string; + }; + "db.MeasurementLean": { + [key: string]: number; }; /** * @example { - * "geometries": [ - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * }, - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - * ], - * "coordinates": "{}", - * "type": "type" + * "utc_offset": "utc_offset", + * "name": 
"name", + * "abbrev": "abbrev", + * "is_dst": true * } */ - Geometry: { - coordinates?: Record; - geometries?: components["schemas"]["geojson.Geometry"][]; - type?: string; + "db.PgTimezoneNamesListRow": { + abbrev?: string; + is_dst?: boolean; + name?: string; + utc_offset?: string; }; /** * @example { + * "x": "{}", + * "y": "{}", * "time": "time" * } */ - Heartbeat: { + "db.PlotConfigMeasurementListBullseyeRow": { time?: string; + x?: Record; + y?: Record; }; + /** @enum {string} */ + "db.PlotType": "scatter-line" | "profile" | "contour" | "bullseye"; /** * @example { - * "new_instruments_7d": 1, - * "project_count": 5, - * "instrument_group_count": 6, - * "new_measurements_2h": 5, - * "instrument_count": 0 + * "id": "id", + * "display_name": "display_name", + * "email": "email", + * "username": "username" * } */ - Home: { - instrument_count?: number; - instrument_group_count?: number; - new_instruments_7d?: number; - new_measurements_2h?: number; - project_count?: number; + "db.ProfileCreateRow": { + display_name?: string; + email?: string; + id?: string; + username?: string; }; /** * @example { - * "name": "name", + * "role": "role", + * "role_id": "role_id", + * "profile_id": "profile_id", * "id": "id", - * "slug": "slug" + * "email": "email", + * "username": "username" * } */ - IDSlugName: { + "db.ProfileProjectRoleGetRow": { + email?: string; id?: string; - name?: string; - slug?: string; + profile_id?: string; + role?: string; + role_id?: string; + username?: string; }; /** * @example { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" + * "role": "role", + * "role_id": "role_id", + * "profile_id": "profile_id", + * "id": "id", + * "email": "email", + * "username": "username" * } */ - InclinometerMeasurement: { - create_date?: string; - creator?: string; - time?: string; - values?: number[]; + "db.ProfileProjectRoleListForProjectRow": { + email?: string; + id?: string; + profile_id?: string; 
+ role?: string; + role_id?: string; + username?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] + * "id": "id", + * "slug": "slug" * } */ - InclinometerMeasurementCollection: { - inclinometers?: components["schemas"]["InclinometerMeasurement"][]; - timeseries_id?: string; + "db.ProjectCreateBatchRow": { + id?: string; + slug?: string; }; /** * @example { - * "items": [ - * { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] - * }, - * { - * "timeseries_id": "timeseries_id", - * "inclinometers": [ - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * }, - * { - * "creator": "creator", - * "values": [ - * 0, - * 0 - * ], - * "time": "time", - * "create_date": "create_date" - * } - * ] - * } - * ] + * "date_range": { + * "value": "value", + * "enabled": true + * }, + * "show_nonvalidated": { + * "value": true, + * "enabled": true + * }, + * "show_masked": { + * "value": true, + * "enabled": true + * } * } */ - InclinometerMeasurementCollectionCollection: { - items?: components["schemas"]["InclinometerMeasurementCollection"][]; + "db.ReportConfigGlobalOverrides": { + date_range?: components["schemas"]["db.TextOption"]; + show_masked?: components["schemas"]["db.ToggleOption"]; + show_nonvalidated?: components["schemas"]["db.ToggleOption"]; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ - * null, - * null - * ] 
+ * "progress_updated_at": "progress_updated_at", + * "file_key": "file_key", + * "report_config_id": "report_config_id", + * "created_at": "created_at", + * "progress": 0, + * "file_expiry": "file_expiry", + * "id": "id", + * "created_by": "created_by", + * "status": "SUCCESS" * } */ - InclinometerMeasurementCollectionLean: { - items?: components["schemas"]["InclinometerMeasurementLean"][]; - timeseries_id?: string; - }; - InclinometerMeasurementLean: { - [key: string]: number[]; + "db.ReportDownloadJob": { + created_at?: string; + created_by?: string; + file_expiry?: string; + file_key?: string; + id?: string; + progress?: number; + progress_updated_at?: string; + report_config_id?: string; + status?: components["schemas"]["db.JobStatus"]; }; /** * @example { - * "has_cwms": true, - * "projects": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "alert_configs": [ - * "alert_configs", - * "alert_configs" - * ], - * "icon": "icon", - * "type": "type", - * "aware_id": "aware_id", - * "status_id": "status_id", - * "opts": { - * "key": "" - * }, - * "station": 6, - * "constants": [ - * "constants", - * "constants" - * ], - * "id": "id", - * "status_time": "status_time", - * "create_date": "create_date", - * "slug": "slug", - * "updater_username": "updater_username", - * "offset": 0, - * "creator_username": "creator_username", - * "type_id": "type_id", - * "show_cwms_tab": true, - * "usgs_id": "usgs_id", - * "groups": [ - * "groups", - * "groups" - * ], - * "update_date": "update_date", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "geometry": { - * "geometries": [ - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * }, - * { - * "geometries": [ - * null, - * null - * ], - * "coordinates": "{}", - * "type": "type" - * } - * ], - * "coordinates": "{}", - * "type": "type" - * }, - * 
"nid_id": "nid_id", - * "status": "status" + * "elevation": 0.8008281904610115, + * "temp": 1.4658129805029452, + * "z_cum_dev": 1.2315135367772556, + * "y_increment": 4.145608029883936, + * "x_cum_dev": 7.061401241503109, + * "temp_increment": 5.637376656633329, + * "z_increment": 1.0246457001441578, + * "y_cum_dev": 2.027123023002322, + * "x_increment": 9.301444243932576, + * "x": 2.3021358869347655, + * "y": 3.616076749251911, + * "z": 7.386281948385884, + * "segment_id": 6, + * "temp_cum_dev": 5.962133916683182 * } */ - Instrument: { - alert_configs?: string[]; - aware_id?: string; - constants?: string[]; - create_date?: string; - creator_id?: string; - creator_username?: string; - geometry?: components["schemas"]["Geometry"]; - groups?: string[]; - has_cwms?: boolean; - icon?: string; - id?: string; - name?: string; - nid_id?: string; - offset?: number; - opts?: { - [key: string]: unknown; - }; - projects?: components["schemas"]["IDSlugName"][]; - show_cwms_tab?: boolean; - slug?: string; - station?: number; - status?: string; - status_id?: string; - status_time?: string; - type?: string; - type_id?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - usgs_id?: string; + "db.SaaMeasurement": { + elevation?: number; + segment_id?: number; + temp?: number; + temp_cum_dev?: number; + temp_increment?: number; + x?: number; + x_cum_dev?: number; + x_increment?: number; + y?: number; + y_cum_dev?: number; + y_increment?: number; + z?: number; + z_cum_dev?: number; + z_increment?: number; }; /** * @example { - * "instrument_count": 0 + * "value": "value", + * "enabled": true * } */ - InstrumentCount: { - instrument_count?: number; + "db.TextOption": { + enabled?: boolean; + value?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "description": "description", - * "instrument_count": 0, - * "update_date": "update_date", - * "project_id": "project_id", - * 
"creator_id": "creator_id", - * "name": "name", - * "timeseries_count": 6, - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "slug": "slug" + * "formula_name": "formula_name", + * "formula": "formula", + * "id": "id", + * "instrument_id": "instrument_id", + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - InstrumentGroup: { - create_date?: string; - creator_id?: string; - creator_username?: string; - description?: string; + "db.TimeseriesComputedListForInstrumentRow": { + formula?: string; + formula_name?: string; id?: string; - instrument_count?: number; - name?: string; - project_id?: string; + instrument_id?: string; + parameter_id?: string; slug?: string; - timeseries_count?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; + unit_id?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", + * "name": "name", * "id": "id", - * "time": "time", - * "body": "body", - * "create_date": "create_date", - * "title": "title", + * "type": "standard", * "instrument_id": "instrument_id", - * "update_date": "update_date" + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - InstrumentNote: { - body?: string; - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.TimeseriesCreateBatchRow": { id?: string; instrument_id?: string; - time?: string; - title?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + name?: string; + parameter_id?: string; + slug?: string; + type?: components["schemas"]["db.TimeseriesType"]; + unit_id?: string; }; + /** @enum {string} */ + "db.TimeseriesType": "standard" | "constant" | "computed" | "cwms"; /** * @example { - * "items": [ - * { - * "updater_username": "updater_username", - * "creator_username": 
"creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", - * "id": "id", - * "time": "time", - * "body": "body", - * "create_date": "create_date", - * "title": "title", - * "instrument_id": "instrument_id", - * "update_date": "update_date" - * }, - * { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "creator_id": "creator_id", - * "updater_id": "updater_id", - * "id": "id", - * "time": "time", - * "body": "body", - * "create_date": "create_date", - * "title": "title", - * "instrument_id": "instrument_id", - * "update_date": "update_date" - * } - * ] + * "value": true, + * "enabled": true * } */ - InstrumentNoteCollection: { - items?: components["schemas"]["InstrumentNote"][]; + "db.ToggleOption": { + enabled?: boolean; + value?: boolean; }; /** * @example { - * "project_ids": [ - * "project_ids", - * "project_ids" - * ] + * "timeseries_id": "timeseries_id", + * "uploader_config_id": "uploader_config_id", + * "field_name": "field_name" * } */ - InstrumentProjectAssignments: { - project_ids?: string[]; + "db.UploaderConfigMapping": { + field_name?: string; + timeseries_id?: string; + uploader_config_id?: string; }; + /** @enum {string} */ + "db.UploaderConfigType": "csv" | "dux" | "toa5"; /** * @example { - * "status_id": "status_id", + * "instruments": [ + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * }, + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + * ], + * "alert_config_id": "alert_config_id", + * "project_id": "project_id", + * "name": "name", + * "created_at": "created_at", * "id": "id", - * "time": "time", - * "status": "status" + * "body": "body", + * "project_name": "project_name" * } */ - InstrumentStatus: { + "db.VAlert": { + alert_config_id?: string; + body?: string; + created_at?: string; id?: string; - status?: string; - status_id?: string; - time?: string; + instruments?: 
components["schemas"]["db.InstrumentIDName"][]; + name?: string; + project_id?: string; + project_name?: string; }; /** * @example { - * "items": [ + * "alert_type_id": "alert_type_id", + * "created_at": "created_at", + * "remind_interval": "remind_interval", + * "create_next_submittal_from": "create_next_submittal_from", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "alert_type": "alert_type", + * "last_checked_at": "last_checked_at", + * "updated_by_username": "updated_by_username", + * "instruments": [ + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * }, + * { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "mute_consecutive_alerts": true, + * "name": "name", + * "updated_by": "updated_by", + * "schedule_interval": "schedule_interval", + * "started_at": "started_at", + * "created_by_username": "created_by_username", + * "id": "id", + * "alert_email_subscriptions": [ * { - * "status_id": "status_id", + * "user_type": "user_type", * "id": "id", - * "time": "time", - * "status": "status" + * "email": "email", + * "username": "username" * }, * { - * "status_id": "status_id", + * "user_type": "user_type", * "id": "id", - * "time": "time", - * "status": "status" + * "email": "email", + * "username": "username" * } - * ] + * ], + * "last_reminded_at": "last_reminded_at", + * "warning_interval": "warning_interval" * } */ - InstrumentStatusCollection: { - items?: components["schemas"]["InstrumentStatus"][]; + "db.VAlertConfig": { + alert_email_subscriptions?: components["schemas"]["db.EmailAutocompleteResult"][]; + alert_type?: string; + alert_type_id?: string; + body?: string; + create_next_submittal_from?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instruments?: components["schemas"]["db.InstrumentIDName"][]; + 
last_checked_at?: string; + last_reminded_at?: string; + mute_consecutive_alerts?: boolean; + name?: string; + project_id?: string; + project_name?: string; + remind_interval?: string; + schedule_interval?: string; + started_at?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + warning_interval?: string; }; /** * @example { - * "is_valid": true, + * "timeseries": [ + * { + * "instrument": "instrument", + * "type": "standard", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "latest_time": "latest_time", + * "sort_order": 1, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * }, + * { + * "instrument": "instrument", + * "type": "standard", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "latest_value": 6.027456183070403, + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "latest_time": "latest_time", + * "sort_order": 1, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "id": "id", + * "created_by": "created_by", + * "sort_order": 0, + * "slug": "slug" + * } + */ + "db.VCollectionGroupDetails": { + created_at?: string; + created_by?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + timeseries?: components["schemas"]["db.CollectionGroupDetailsTimeseries"][]; + updated_at?: string; + updated_by?: string; + }; + /** + * @example { + * "created_at": "created_at", + * "model_id": "model_id", + * "created_by": "created_by", + * 
"updated_by_username": "updated_by_username", + * "tables": [ + * { + * "id": "id", + * "table_name": "table_name" + * }, + * { + * "id": "id", + * "table_name": "table_name" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "model": "model", + * "id": "id", + * "sn": "sn", * "errors": [ * "errors", * "errors" - * ] + * ], + * "slug": "slug" * } */ - InstrumentsValidation: { + "db.VDatalogger": { + created_at?: string; + created_by?: string; + created_by_username?: string; errors?: string[]; - is_valid?: boolean; + id?: string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: components["schemas"]["db.DataloggerTableIDName"][]; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "time": "time", - * "measurements": [ + * "datalogger_table_id": "datalogger_table_id", + * "datalogger_table_name": "datalogger_table_name", + * "fields": [ * { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 + * "timeseries_id": "timeseries_id", + * "id": "id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * }, * { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 + * "timeseries_id": "timeseries_id", + * "id": "id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" * } - * ] + * ], + * "datalogger_id": "datalogger_id" * } */ - IpiMeasurements: { - measurements?: components["schemas"]["IpiSegmentMeasurement"][]; - time?: string; + 
"db.VDataloggerEquivalencyTable": { + datalogger_id?: string; + datalogger_table_id?: string; + datalogger_table_name?: string; + fields?: components["schemas"]["db.DataloggerEquivalencyTableField"][]; }; /** * @example { - * "temp_timeseries_id": "temp_timeseries_id", - * "length": 6.027456183070403, - * "tilt_timeseries_id": "tilt_timeseries_id", - * "id": 0, - * "inc_dev_timeseries_id": "inc_dev_timeseries_id", - * "instrument_id": "instrument_id", - * "length_timeseries_id": "length_timeseries_id" + * "preview": [ + * 0, + * 0 + * ], + * "updated_at": "updated_at", + * "datalogger_table_id": "datalogger_table_id" * } */ - IpiSegment: { - id?: number; - inc_dev_timeseries_id?: string; - instrument_id?: string; - length?: number; - length_timeseries_id?: string; - temp_timeseries_id?: string; - tilt_timeseries_id?: string; + "db.VDataloggerPreview": { + datalogger_table_id?: string; + preview?: number[]; + updated_at?: string; }; /** * @example { - * "elevation": 6.027456183070403, - * "temp": 5.637376656633329, - * "inc_dev": 1.4658129805029452, - * "tilt": 2.3021358869347655, - * "segment_id": 5, - * "cum_dev": 0.8008281904610115 - * } - */ - IpiSegmentMeasurement: { - cum_dev?: number; - elevation?: number; - inc_dev?: number; - segment_id?: number; - temp?: number; - tilt?: number; + * "office_id": "office_id", + * "agency": "agency", + * "initials": "initials", + * "division_initials": "division_initials", + * "division_name": "division_name", + * "name": "name", + * "id": "id" + * } + */ + "db.VDistrict": { + agency?: string; + division_initials?: string; + division_name?: string; + id?: string; + initials?: string; + name?: string; + office_id?: string; }; /** * @example { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "expected_total_submittals": 6, + * "office_id": "office_id", + * "alert_type_id": "alert_type_id", + * "month": "month", + * 
"project_id": "project_id", + * "red_submittals": 5, + * "green_submittals": 1, + * "yellow_submittals": 5, + * "actual_total_submittals": 0, + * "district_initials": "district_initials", + * "project_name": "project_name" * } */ - Measurement: { - annotation?: string; - error?: string; - masked?: boolean; - time?: string; - validated?: boolean; - value?: number; + "db.VDistrictRollup": { + actual_total_submittals?: number; + alert_type_id?: string; + district_initials?: string; + expected_total_submittals?: number; + green_submittals?: number; + month?: string; + office_id?: string; + project_id?: string; + project_name?: string; + red_submittals?: number; + yellow_submittals?: number; + }; + "db.VDomain": { + description?: string; + group?: string; + id?: string; + value?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ + * "alert_config_id": "alert_config_id", + * "created_at": "created_at", + * "alert_config_name": "alert_config_name", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "submittal_id": "submittal_id", + * "updated_by_username": "updated_by_username", + * "instruments": [ * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * }, * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * } - * ] + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "started_at": "started_at", + * "created_by_username": "created_by_username", + * "id": "id", + * "ended_at": "ended_at" * } */ - MeasurementCollection: { - items?: components["schemas"]["Measurement"][]; - 
timeseries_id?: string; + "db.VEvaluation": { + alert_config_id?: string; + alert_config_name?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + ended_at?: string; + id?: string; + instruments?: components["schemas"]["db.InstrumentIDName"][]; + name?: string; + project_id?: string; + project_name?: string; + started_at?: string; + submittal_id?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "timeseries_id": "timeseries_id", - * "items": [ - * null, - * null - * ] + * "time": "time", + * "instrument_id": "instrument_id", + * "measurements": "{}" * } */ - MeasurementCollectionLean: { - items?: components["schemas"]["MeasurementLean"][]; - timeseries_id?: string; - }; - MeasurementLean: { - [key: string]: number; + "db.VInclMeasurement": { + instrument_id?: string; + measurements?: Record; + time?: string; }; - Opts: { - [key: string]: unknown; + /** + * @example { + * "depth_timeseries_id": "depth_timeseries_id", + * "b180_timeseries_id": "b180_timeseries_id", + * "a180_timeseries_id": "a180_timeseries_id", + * "id": 0, + * "instrument_id": "instrument_id", + * "a0_timeseries_id": "a0_timeseries_id", + * "b0_timeseries_id": "b0_timeseries_id" + * } + */ + "db.VInclSegment": { + a0_timeseries_id?: string; + a180_timeseries_id?: string; + b0_timeseries_id?: string; + b180_timeseries_id?: string; + depth_timeseries_id?: string; + id?: number; + instrument_id?: string; }; /** * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "key": "" - * }, - * "show_comments": true, - * "report_configs": [ + * "has_cwms": true, + * "projects": [ * { * "name": "name", * "id": "id", @@ -8362,55 +8338,41 @@ export interface components { * "slug": "slug" * } * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * 
"show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", + * "alert_configs": [ + * "alert_configs", + * "alert_configs" + * ], + * "icon": "icon", + * "created_at": "created_at", + * "type": "type", + * "status_id": "status_id", + * "opts": "{}", + * "updated_at": "updated_at", + * "station": 1, + * "constants": [ + * "constants", + * "constants" + * ], * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * } - */ - PlotConfig: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: { - [key: string]: unknown; - }; - id?: string; - name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; - slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "y_axis_timeseries_id": "y_axis_timeseries_id", - * "x_axis_timeseries_id": "x_axis_timeseries_id" - * }, - * "show_comments": true, - * "report_configs": [ + * "status_time": "status_time", + * "slug": "slug", + * "offset": 6, + * "type_id": "type_id", + * "show_cwms_tab": true, + * "usgs_id": "usgs_id", + * "groups": [ + * "groups", + * "groups" + * ], + * "created_by": "created_by", + * "name": "name", + * "updated_by": "updated_by", + * "geometry": [ + * 0, + * 0 + * ], + * "nid_id": "nid_id", + * "telemetry": [ * { * "name": "name", * "id": "id", @@ -8422,372 +8384,132 @@ export interface components { * "slug": "slug" * } * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * 
"show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" + * "status": "status" * } */ - PlotConfigBullseyePlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: components["schemas"]["PlotConfigBullseyePlotDisplay"]; + "db.VInstrument": { + alert_configs?: string[]; + constants?: string[]; + created_at?: string; + created_by?: string; + geometry?: number[]; + groups?: string[]; + has_cwms?: boolean; + icon?: string; id?: string; name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; + nid_id?: string; + offset?: number; + opts?: Record; + projects?: components["schemas"]["db.IDSlugName"][]; + show_cwms_tab?: boolean; slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "y_axis_timeseries_id": "y_axis_timeseries_id", - * "x_axis_timeseries_id": "x_axis_timeseries_id" - * } - */ - PlotConfigBullseyePlotDisplay: { - x_axis_timeseries_id?: string; - y_axis_timeseries_id?: string; + station?: number; + status?: string; + status_id?: string; + status_time?: string; + telemetry?: components["schemas"]["db.IDSlugName"][]; + type?: string; + type_id?: string; + updated_at?: string; + updated_by?: string; + usgs_id?: string; }; /** * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "contour_smoothing": true, - * "gradient_smoothing": true, - * "locf_backfill": "locf_backfill", - * "timeseries_ids": [ - * "timeseries_ids", - * "timeseries_ids" - * ], - * "show_labels": true, 
- * "time": "time" - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * "timeseries_count": "{}", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "description": "description", * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", + * "created_by": "created_by", + * "instrument_count": 0, * "slug": "slug" * } */ - PlotConfigContourPlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: components["schemas"]["PlotConfigContourPlotDisplay"]; + "db.VInstrumentGroup": { + created_at?: string; + created_by?: string; + description?: string; id?: string; + instrument_count?: number; name?: string; - plot_type?: string; project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; + timeseries_count?: Record; + updated_at?: string; + updated_by?: string; }; /** * @example { - * "contour_smoothing": true, - * "gradient_smoothing": true, - * "locf_backfill": "locf_backfill", - * "timeseries_ids": [ - * "timeseries_ids", - * "timeseries_ids" - * ], - * "show_labels": true, - * "time": "time" + * "status_id": "status_id", + * "id": "id", + * "time": "time", + * "instrument_id": "instrument_id", + * "status": "status" * } */ - PlotConfigContourPlotDisplay: { - contour_smoothing?: boolean; - gradient_smoothing?: boolean; 
- locf_backfill?: string; - show_labels?: boolean; - time?: string; - timeseries_ids?: string[]; - }; - /** - * @example { - * "x": 0.8008281904610115, - * "y": 6.027456183070403, - * "time": "time" - * } - */ - PlotConfigMeasurementBullseyePlot: { - time?: string; - x?: number; - y?: number; - }; - /** - * @example { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "instrument_type": "instrument_type", - * "instrument_id": "instrument_id" - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 0, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * } - */ - PlotConfigProfilePlot: { - auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; - date_range?: string; - display?: components["schemas"]["PlotConfigProfilePlotDisplay"]; + "db.VInstrumentStatus": { id?: string; - name?: string; - plot_type?: string; - project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; - show_comments?: boolean; - show_masked?: boolean; - show_nonvalidated?: boolean; - slug?: string; - threshold?: number; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "instrument_type": "instrument_type", - * "instrument_id": "instrument_id" - * } - */ - PlotConfigProfilePlotDisplay: { instrument_id?: string; - instrument_type?: string; - }; - /** - * @example { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * 
"plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - */ - PlotConfigScatterLineCustomShape: { - color?: string; - data_point?: number; - enabled?: boolean; - name?: string; - plot_configuration_id?: string; + status?: string; + status_id?: string; + time?: string; }; /** * @example { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ + * "time": "time", + * "instrument_id": "instrument_id", + * "measurements": [ * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 + * "elevation": 6.027456183070403, + * "temp": 5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * }, * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 + * "elevation": 6.027456183070403, + * "temp": 5.637376656633329, + * "inc_dev": 1.4658129805029452, + * "tilt": 2.3021358869347655, + * "segment_id": 5, + * "cum_dev": 0.8008281904610115 * } * ] * } */ - PlotConfigScatterLineDisplay: { - layout?: 
components["schemas"]["PlotConfigScatterLineLayout"]; - traces?: components["schemas"]["PlotConfigScatterLineTimeseriesTrace"][]; + "db.VIpiMeasurement": { + instrument_id?: string; + measurements?: components["schemas"]["db.IpiMeasurement"][]; + time?: string; }; /** * @example { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" + * "length": 6.027456183070403, + * "tilt_timeseries_id": "tilt_timeseries_id", + * "id": 0, + * "inc_dev_timeseries_id": "inc_dev_timeseries_id", + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" * } */ - PlotConfigScatterLineLayout: { - custom_shapes?: components["schemas"]["PlotConfigScatterLineCustomShape"][]; - y2_axis_title?: string; - y_axis_title?: string; + "db.VIpiSegment": { + id?: number; + inc_dev_timeseries_id?: string; + instrument_id?: string; + length?: number; + length_timeseries_id?: string; + tilt_timeseries_id?: string; }; /** * @example { * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * 
"show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, + * "display": "{}", * "show_comments": true, * "report_configs": [ * { @@ -8801,72 +8523,39 @@ export interface components { * "slug": "slug" * } * ], + * "created_at": "created_at", * "auto_range": true, * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", + * "threshold": 0, + * "created_by": "created_by", * "show_nonvalidated": true, + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * "updated_by": "updated_by", * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", + * "plot_type": "scatter-line", * "slug": "slug" * } */ - PlotConfigScatterLinePlot: { + "db.VPlotConfiguration": { auto_range?: boolean; - create_date?: string; - creator_id?: string; - creator_username?: string; + created_at?: string; + created_by?: string; date_range?: string; - display?: components["schemas"]["PlotConfigScatterLineDisplay"]; + display?: Record; id?: string; name?: string; - plot_type?: string; + plot_type?: components["schemas"]["db.PlotType"]; project_id?: string; - report_configs?: components["schemas"]["IDSlugName"][]; + report_configs?: components["schemas"]["db.IDSlugName"][]; show_comments?: boolean; show_masked?: boolean; show_nonvalidated?: boolean; slug?: string; threshold?: number; - update_date?: 
string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - */ - PlotConfigScatterLineTimeseriesTrace: { - color?: string; - line_style?: string; - /** @description read-only */ - name?: string; - /** @description read-only */ - parameter?: string; - plot_configuration_id?: string; - show_markers?: boolean; - timeseries_id?: string; - trace_order?: number; - trace_type?: string; - width?: number; - /** @description y1 or y2, default y1 */ - y_axis?: string; + updated_at?: string; + updated_by?: string; }; /** * @example { @@ -8887,96 +8576,69 @@ export interface components { * ], * "id": "id", * "display_name": "display_name", + * "edipi": 0, * "email": "email", * "username": "username" * } */ - Profile: { + "db.VProfile": { display_name?: string; + edipi?: number; email?: string; id?: string; is_admin?: boolean; roles?: string[]; - tokens?: components["schemas"]["TokenInfoProfile"][]; + tokens?: components["schemas"]["db.VProfileToken"][]; username?: string; }; /** * @example { - * "image": "image", - * "updater_username": "updater_username", + * "token_id": "token_id", + * "issued": "issued" + * } + */ + "db.VProfileToken": { + issued?: string; + token_id?: string; + }; + /** + * @example { + * "image": "{}", * "federal_id": "federal_id", - * "creator_username": "creator_username", + * "created_at": "created_at", + * "created_by": "created_by", * "instrument_count": 0, - * "update_date": "update_date", * "office_id": "office_id", + * "updated_by_username": "updated_by_username", * "instrument_group_count": 6, - * "creator_id": "creator_id", + * "updated_at": "updated_at", * "name": "name", - * 
"updater_id": "updater_id", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", * "district_id": "district_id", * "id": "id", - * "create_date": "create_date", * "slug": "slug" * } */ - Project: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.VProject": { + created_at?: string; + created_by?: string; + created_by_username?: string; district_id?: string; federal_id?: string; id?: string; - image?: string; + image?: Record; instrument_count?: number; instrument_group_count?: number; name?: string; office_id?: string; slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "project_count": 0 - * } - */ - ProjectCount: { - project_count?: number; - }; - /** - * @example { - * "instrument_ids": [ - * "instrument_ids", - * "instrument_ids" - * ] - * } - */ - ProjectInstrumentAssignments: { - instrument_ids?: string[]; - }; - /** - * @example { - * "role": "role", - * "role_id": "role_id", - * "profile_id": "profile_id", - * "id": "id", - * "email": "email", - * "username": "username" - * } - */ - ProjectMembership: { - email?: string; - id?: string; - profile_id?: string; - role?: string; - role_id?: string; - username?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", * "global_overrides": { * "date_range": { * "value": "value", @@ -8991,16 +8653,18 @@ export interface components { * "enabled": true * } * }, + * "created_at": "created_at", * "description": "description", * "project_name": "project_name", - * "update_date": "update_date", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", * "district_name": "district_name", + * "updated_at": "updated_at", * "project_id": "project_id", - * "creator_id": "creator_id", * "name": "name", - * "updater_id": "updater_id", + * 
"updated_by": "updated_by", + * "created_by_username": "created_by_username", * "id": "id", - * "create_date": "create_date", * "plot_configs": [ * { * "name": "name", @@ -9016,322 +8680,67 @@ export interface components { * "slug": "slug" * } */ - ReportConfig: { - create_date?: string; - creator_id?: string; - creator_username?: string; + "db.VReportConfig": { + created_at?: string; + created_by?: string; + created_by_username?: string; description?: string; district_name?: string; - global_overrides?: components["schemas"]["ReportConfigGlobalOverrides"]; + global_overrides?: components["schemas"]["db.ReportConfigGlobalOverrides"]; id?: string; name?: string; - plot_configs?: components["schemas"]["IDSlugName"][]; + plot_configs?: components["schemas"]["db.IDSlugName"][]; project_id?: string; project_name?: string; slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; }; /** * @example { - * "date_range": { - * "value": "value", - * "enabled": true - * }, - * "show_nonvalidated": { - * "value": true, - * "enabled": true - * }, - * "show_masked": { - * "value": true, - * "enabled": true - * } + * "time": "time", + * "instrument_id": "instrument_id", + * "measurements": [ + * { + * "elevation": 0.8008281904610115, + * "temp": 1.4658129805029452, + * "z_cum_dev": 1.2315135367772556, + * "y_increment": 4.145608029883936, + * "x_cum_dev": 7.061401241503109, + * "temp_increment": 5.637376656633329, + * "z_increment": 1.0246457001441578, + * "y_cum_dev": 2.027123023002322, + * "x_increment": 9.301444243932576, + * "x": 2.3021358869347655, + * "y": 3.616076749251911, + * "z": 7.386281948385884, + * "segment_id": 6, + * "temp_cum_dev": 5.962133916683182 + * }, + * { + * "elevation": 0.8008281904610115, + * "temp": 1.4658129805029452, + * "z_cum_dev": 1.2315135367772556, + * "y_increment": 4.145608029883936, + * "x_cum_dev": 7.061401241503109, + * 
"temp_increment": 5.637376656633329, + * "z_increment": 1.0246457001441578, + * "y_cum_dev": 2.027123023002322, + * "x_increment": 9.301444243932576, + * "x": 2.3021358869347655, + * "y": 3.616076749251911, + * "z": 7.386281948385884, + * "segment_id": 6, + * "temp_cum_dev": 5.962133916683182 + * } + * ] * } */ - ReportConfigGlobalOverrides: { - date_range?: components["schemas"]["TextOption"]; - show_masked?: components["schemas"]["ToggleOption"]; - show_nonvalidated?: components["schemas"]["ToggleOption"]; - }; - /** - * @example { - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "global_overrides": { - * "date_range": { - * "value": "value", - * "enabled": true - * }, - * "show_nonvalidated": { - * "value": true, - * "enabled": true - * }, - * "show_masked": { - * "value": true, - * "enabled": true - * } - * }, - * "description": "description", - * "project_name": "project_name", - * "update_date": "update_date", - * "district_name": "district_name", - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_configs": [ - * { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * 
"name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * }, - * { - * "date_range": "date_range", - * "updater_username": "updater_username", - * "creator_username": "creator_username", - * "display": { - * "layout": { - * "custom_shapes": [ - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * }, - * { - * "color": "color", - * "data_point": 0.8008281904610115, - * "name": "name", - * "plot_configuration_id": "plot_configuration_id", - * "enabled": true - * } - * ], - * "y_axis_title": "y_axis_title", - * "y2_axis_title": "y2_axis_title" - * }, - * "traces": [ - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": 
"plot_configuration_id", - * "trace_order": 6 - * }, - * { - * "trace_type": "trace_type", - * "color": "color", - * "show_markers": true, - * "timeseries_id": "timeseries_id", - * "y_axis": "y_axis", - * "parameter": "parameter", - * "name": "name", - * "width": 1.4658129805029452, - * "line_style": "line_style", - * "plot_configuration_id": "plot_configuration_id", - * "trace_order": 6 - * } - * ] - * }, - * "show_comments": true, - * "report_configs": [ - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * }, - * { - * "name": "name", - * "id": "id", - * "slug": "slug" - * } - * ], - * "auto_range": true, - * "show_masked": true, - * "threshold": 5, - * "update_date": "update_date", - * "show_nonvalidated": true, - * "project_id": "project_id", - * "creator_id": "creator_id", - * "name": "name", - * "updater_id": "updater_id", - * "id": "id", - * "create_date": "create_date", - * "plot_type": "plot_type", - * "slug": "slug" - * } - * ], - * "slug": "slug" - * } - */ - ReportConfigWithPlotConfigs: { - create_date?: string; - creator_id?: string; - creator_username?: string; - description?: string; - district_name?: string; - global_overrides?: components["schemas"]["ReportConfigGlobalOverrides"]; - id?: string; - name?: string; - plot_configs?: components["schemas"]["PlotConfigScatterLinePlot"][]; - project_id?: string; - project_name?: string; - slug?: string; - update_date?: string; - updater_id?: string; - updater_username?: string; - }; - /** - * @example { - * "file_key": "file_key", - * "creator": "creator", - * "progress_update_date": "progress_update_date", - * "report_config_id": "report_config_id", - * "progress": 0, - * "file_expiry": "file_expiry", - * "id": "id", - * "create_date": "create_date", - * "status": "status" - * } - */ - ReportDownloadJob: { - create_date?: string; - creator?: string; - file_expiry?: string; - file_key?: string; - id?: string; - progress?: number; - progress_update_date?: string; - report_config_id?: string; - 
status?: string; - }; - /** - * @example { - * "time": "time", - * "measurements": [ - * { - * "elevation": 0.8008281904610115, - * "temp": 1.4658129805029452, - * "z_cum_dev": 1.2315135367772556, - * "y_increment": 4.145608029883936, - * "x_cum_dev": 7.061401241503109, - * "temp_increment": 5.637376656633329, - * "z_increment": 1.0246457001441578, - * "y_cum_dev": 2.027123023002322, - * "x_increment": 9.301444243932576, - * "x": 2.3021358869347655, - * "y": 3.616076749251911, - * "z": 7.386281948385884, - * "segment_id": 6, - * "temp_cum_dev": 5.962133916683182 - * }, - * { - * "elevation": 0.8008281904610115, - * "temp": 1.4658129805029452, - * "z_cum_dev": 1.2315135367772556, - * "y_increment": 4.145608029883936, - * "x_cum_dev": 7.061401241503109, - * "temp_increment": 5.637376656633329, - * "z_increment": 1.0246457001441578, - * "y_cum_dev": 2.027123023002322, - * "x_increment": 9.301444243932576, - * "x": 2.3021358869347655, - * "y": 3.616076749251911, - * "z": 7.386281948385884, - * "segment_id": 6, - * "temp_cum_dev": 5.962133916683182 - * } - * ] - * } - */ - SaaMeasurements: { - measurements?: components["schemas"]["SaaSegmentMeasurement"][]; - time?: string; + "db.VSaaMeasurement": { + instrument_id?: string; + measurements?: components["schemas"]["db.SaaMeasurement"][]; + time?: string; }; /** * @example { @@ -9345,7 +8754,7 @@ export interface components { * "length_timeseries_id": "length_timeseries_id" * } */ - SaaSegment: { + "db.VSaaSegment": { id?: number; instrument_id?: string; length?: number; @@ -9355,87 +8764,31 @@ export interface components { y_timeseries_id?: string; z_timeseries_id?: string; }; - /** - * @example { - * "elevation": 0.8008281904610115, - * "temp": 1.4658129805029452, - * "z_cum_dev": 1.2315135367772556, - * "y_increment": 4.145608029883936, - * "x_cum_dev": 7.061401241503109, - * "temp_increment": 5.637376656633329, - * "z_increment": 1.0246457001441578, - * "y_cum_dev": 2.027123023002322, - * "x_increment": 
9.301444243932576, - * "x": 2.3021358869347655, - * "y": 3.616076749251911, - * "z": 7.386281948385884, - * "segment_id": 6, - * "temp_cum_dev": 5.962133916683182 - * } - */ - SaaSegmentMeasurement: { - elevation?: number; - segment_id?: number; - temp?: number; - temp_cum_dev?: number; - temp_increment?: number; - x?: number; - x_cum_dev?: number; - x_increment?: number; - y?: number; - y_cum_dev?: number; - y_increment?: number; - z?: number; - z_cum_dev?: number; - z_increment?: number; - }; - /** - * @example { - * "item": "{}", - * "id": "id", - * "type": "type" - * } - */ - SearchResult: { - id?: string; - item?: Record; - type?: string; - }; - Site: { - description?: string; - elevation?: string; - elevationUnits?: string; - siteName?: components["schemas"]["SiteName"]; - }; - SiteName: { - id?: string; - nameType?: string; - }; /** * @example { * "alert_type_id": "alert_type_id", * "alert_config_id": "alert_config_id", - * "due_date": "due_date", + * "created_at": "created_at", * "alert_config_name": "alert_config_name", * "submittal_status_id": "submittal_status_id", * "submittal_status_name": "submittal_status_name", * "warning_sent": true, + * "completed_at": "completed_at", * "project_id": "project_id", * "alert_type_name": "alert_type_name", * "marked_as_missing": true, - * "completion_date": "completion_date", - * "id": "id", - * "create_date": "create_date" + * "due_at": "due_at", + * "id": "id" * } */ - Submittal: { + "db.VSubmittal": { alert_config_id?: string; alert_config_name?: string; alert_type_id?: string; alert_type_name?: string; - completion_date?: string; - create_date?: string; - due_date?: string; + completed_at?: string; + created_at?: string; + due_at?: string; id?: string; marked_as_missing?: boolean; project_id?: string; @@ -9445,41 +8798,49 @@ export interface components { }; /** * @example { - * "value": "value", - * "enabled": true + * "instrument": "instrument", + * "type": "standard", + * "instrument_id": "instrument_id", + * 
"unit": "unit", + * "parameter": "parameter", + * "name": "name", + * "variable": "{}", + * "id": "id", + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - TextOption: { - enabled?: boolean; - value?: string; + "db.VTimeseries": { + id?: string; + instrument?: string; + instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; + name?: string; + parameter?: string; + parameter_id?: string; + slug?: string; + type?: components["schemas"]["db.TimeseriesType"]; + unit?: string; + unit_id?: string; + variable?: Record; }; /** * @example { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], + * "cwms_office_id": "cwms_office_id", * "instrument": "instrument", - * "type": "type", + * "cwms_extent_earliest_time": "cwms_extent_earliest_time", + * "type": "standard", + * "cwms_timeseries_id": "cwms_timeseries_id", * "instrument_id": "instrument_id", * "unit": "unit", * "parameter": "parameter", + * "cwms_extent_latest_time": "cwms_extent_latest_time", * "name": "name", - * "variable": "variable", + * "variable": "{}", * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, @@ -9488,7 +8849,11 @@ export interface components { * "parameter_id": "parameter_id" * } */ - Timeseries: { + "db.VTimeseriesCwms": { + cwms_extent_earliest_time?: string; + cwms_extent_latest_time?: string; + cwms_office_id?: string; + cwms_timeseries_id?: string; id?: string; instrument?: string; instrument_id?: string; @@ -9498,253 +8863,744 @@ export interface components { parameter?: string; parameter_id?: string; slug?: string; - type?: string; + type?: 
components["schemas"]["db.TimeseriesType"]; unit?: string; unit_id?: string; - values?: components["schemas"]["Measurement"][]; - variable?: string; + variable?: Record; }; /** * @example { - * "items": [ - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * }, - * { - * "values": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ], - * "instrument": "instrument", - * "type": "type", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "name": "name", - * "variable": "variable", - * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" - * } - * ] + * "measure": "measure", + * "unit_family_id": "unit_family_id", + * "name": "name", + * "unit_family": "unit_family", + * "id": "id", + * "abbreviation": "abbreviation", + * "measure_id": "measure_id" * } */ - TimeseriesCollectionItems: { - items?: components["schemas"]["Timeseries"][]; + "db.VUnit": { + abbreviation?: string; + id?: string; + measure?: string; + 
measure_id?: string; + name?: string; + unit_family?: string; + unit_family_id?: string; }; /** * @example { - * "cwms_office_id": "cwms_office_id", - * "values": [ + * "validated_field": "validated_field", + * "created_at": "created_at", + * "description": "description", + * "row_offset": 6, + * "comment_field": "comment_field", + * "type": "csv", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "time_field": "time_field", + * "masked_field": "masked_field", + * "tz_name": "tz_name", + * "updated_at": "updated_at", + * "comment_field_enabled": true, + * "project_id": "project_id", + * "column_offset": 0, + * "name": "name", + * "updated_by": "updated_by", + * "masked_field_enabled": true, + * "created_by_username": "created_by_username", + * "id": "id", + * "validated_field_enabled": true, + * "slug": "slug" + * } + */ + "db.VUploaderConfig": { + column_offset?: number; + comment_field?: string; + comment_field_enabled?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + id?: string; + masked_field?: string; + masked_field_enabled?: boolean; + name?: string; + project_id?: string; + row_offset?: number; + slug?: string; + time_field?: string; + type?: components["schemas"]["db.UploaderConfigType"]; + tz_name?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + validated_field?: string; + validated_field_enabled?: boolean; + }; + "dto.AlertConfig": { + alert_email_subscriptions?: components["schemas"]["dto.EmailAutocompleteResult"][]; + alert_type?: string; + alert_type_id?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instruments?: components["schemas"]["dto.AlertConfigInstrument"][]; + last_checked?: string; + last_reminded?: string; + mute_consecutive_alerts?: boolean; + name?: string; + project_id?: string; + project_name?: string; + remind_interval?: 
string; + schedule_interval?: string; + started_at?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + warning_interval?: string; + }; + "dto.AlertConfigInstrument": { + instrument_id?: string; + instrument_name?: string; + }; + "dto.AlertSubscription": { + alert_config_id?: string; + id?: string; + mute_notify?: boolean; + mute_ui?: boolean; + profile_id?: string; + }; + /** + * @example { + * "formula_name": "formula_name", + * "formula": "formula", + * "id": "id", + * "instrument_id": "instrument_id", + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" + * } + */ + "dto.CalculatedTimeseries": { + formula?: string; + formula_name?: string; + id?: string; + instrument_id?: string; + parameter_id?: string; + slug?: string; + unit_id?: string; + }; + "dto.CollectionGroup": { + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + name?: string; + project_id?: string; + slug?: string; + sort_order?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.Datalogger": { + created_at?: string; + created_by?: string; + created_by_username?: string; + errors?: string[]; + id?: string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: components["schemas"]["dto.DataloggerTable"][]; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.DataloggerTable": { + id?: string; + table_name?: string; + }; + "dto.EmailAutocompleteResult": { + email?: string; + id?: string; + user_type?: string; + username?: string; + }; + "dto.EquivalencyTable": { + datalogger_id?: string; + datalogger_table_id?: string; + datalogger_table_name?: string; + rows?: components["schemas"]["dto.EquivalencyTableRow"][]; + }; + "dto.EquivalencyTableRow": { + display_name?: string; + field_name?: string; + id?: string; + instrument_id?: string; + 
timeseries_id?: string; + }; + /** + * @example { + * "alert_config_id": "alert_config_id", + * "created_at": "created_at", + * "alert_config_name": "alert_config_name", + * "body": "body", + * "project_name": "project_name", + * "created_by": "created_by", + * "submittal_id": "submittal_id", + * "updated_by_username": "updated_by_username", + * "instruments": [ * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * }, * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" * } * ], - * "instrument": "instrument", - * "cwms_extent_earliest_time": "cwms_extent_earliest_time", - * "type": "type", - * "cwms_timeseries_id": "cwms_timeseries_id", - * "instrument_id": "instrument_id", - * "unit": "unit", - * "parameter": "parameter", - * "cwms_extent_latest_time": "cwms_extent_latest_time", + * "project_id": "project_id", * "name": "name", - * "variable": "variable", + * "updated_by": "updated_by", + * "started_at": "started_at", + * "updatedd_at": "updatedd_at", + * "created_by_username": "created_by_username", * "id": "id", - * "instrument_slug": "instrument_slug", - * "is_computed": true, - * "unit_id": "unit_id", - * "slug": "slug", - * "parameter_id": "parameter_id" + * "ended_at": "ended_at" * } */ - TimeseriesCwms: { - cwms_extent_earliest_time?: string; - cwms_extent_latest_time?: string; - cwms_office_id?: string; - cwms_timeseries_id?: string; + "dto.Evaluation": { + alert_config_id?: string; + alert_config_name?: string; + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + ended_at?: string; id?: string; - instrument?: string; + instruments?: 
components["schemas"]["dto.EvaluationInstrument"][]; + name?: string; + project_id?: string; + project_name?: string; + started_at?: string; + submittal_id?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + /** + * @example { + * "instrument_name": "instrument_name", + * "instrument_id": "instrument_id" + * } + */ + "dto.EvaluationInstrument": { instrument_id?: string; - instrument_slug?: string; - is_computed?: boolean; + instrument_name?: string; + }; + /** + * @example { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + */ + "dto.IDSlugName": { + id?: string; name?: string; - parameter?: string; - parameter_id?: string; slug?: string; - type?: string; - unit?: string; - unit_id?: string; - values?: components["schemas"]["Measurement"][]; - variable?: string; }; /** * @example { - * "items": [ + * "depth_timeseries_id": "depth_timeseries_id", + * "b180_timeseries_id": "b180_timeseries_id", + * "a180_timeseries_id": "a180_timeseries_id", + * "id": 0, + * "instrument_id": "instrument_id", + * "a0_timeseries_id": "a0_timeseries_id", + * "b0_timeseries_id": "b0_timeseries_id" + * } + */ + "dto.InclSegment": { + a0_timeseries_id?: string; + a180_timeseries_id?: string; + b0_timeseries_id?: string; + b180_timeseries_id?: string; + depth_timeseries_id?: string; + id?: number; + instrument_id?: string; + }; + /** + * @example { + * "has_cwms": true, + * "projects": [ * { - * "timeseries_id": "timeseries_id", - * "items": [ - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ] + * "name": "name", + * "id": "id", + * "slug": "slug" * }, * { - * "timeseries_id": "timeseries_id", - * "items": [ - * { - * "annotation": "annotation", - * "validated": 
true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * }, - * { - * "annotation": "annotation", - * "validated": true, - * "masked": true, - * "time": "time", - * "error": "error", - * "value": 0.8008281904610115 - * } - * ] + * "name": "name", + * "id": "id", + * "slug": "slug" * } - * ] + * ], + * "alert_configs": [ + * "alert_configs", + * "alert_configs" + * ], + * "icon": "icon", + * "created_at": "created_at", + * "type": "type", + * "aware_id": "aware_id", + * "updated_by_username": "updated_by_username", + * "status_id": "status_id", + * "opts": { + * "key": "" + * }, + * "station": 1, + * "created_by_username": "created_by_username", + * "constants": [ + * "constants", + * "constants" + * ], + * "id": "id", + * "status_time": "status_time", + * "slug": "slug", + * "offset": 6, + * "type_id": "type_id", + * "show_cwms_tab": true, + * "usgs_id": "usgs_id", + * "groups": [ + * "groups", + * "groups" + * ], + * "created_by": "created_by", + * "name": "name", + * "updated_by": "updated_by", + * "updatedd_at": "updatedd_at", + * "geometry": [ + * 0, + * 0 + * ], + * "nid_id": "nid_id", + * "status": "status" * } */ - TimeseriesMeasurementCollectionCollection: { - items?: components["schemas"]["MeasurementCollection"][]; + "dto.Instrument": { + alert_configs?: string[]; + aware_id?: string; + constants?: string[]; + created_at?: string; + created_by?: string; + created_by_username?: string; + geometry?: number[]; + groups?: string[]; + has_cwms?: boolean; + icon?: string; + id?: string; + name?: string; + nid_id?: string; + offset?: number; + opts?: { + [key: string]: unknown; + }; + projects?: components["schemas"]["dto.IDSlugName"][]; + show_cwms_tab?: boolean; + slug?: string; + station?: number; + status?: string; + status_id?: string; + status_time?: string; + type?: string; + type_id?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + usgs_id?: string; + }; + 
"dto.InstrumentGroup": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + id?: string; + instrument_count?: number; + name?: string; + project_id?: string; + slug?: string; + timeseries_count?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.InstrumentNote": { + body?: string; + created_at?: string; + created_by?: string; + created_by_username?: string; + id?: string; + instrument_id?: string; + time?: string; + title?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.InstrumentNoteCollection": { + items?: components["schemas"]["dto.InstrumentNote"][]; + }; + "dto.InstrumentProjectAssignments": { + project_ids?: string[]; + }; + "dto.InstrumentStatus": { + id?: string; + status?: string; + status_id?: string; + time?: string; + }; + "dto.InstrumentStatusCollection": { + items?: components["schemas"]["dto.InstrumentStatus"][]; }; /** * @example { - * "value": true, - * "enabled": true + * "temp_timeseries_id": "temp_timeseries_id", + * "length": 6.027456183070403, + * "tilt_timeseries_id": "tilt_timeseries_id", + * "id": 0, + * "inc_dev_timeseries_id": "inc_dev_timeseries_id", + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" * } */ - ToggleOption: { + "dto.IpiSegment": { + id?: number; + inc_dev_timeseries_id?: string; + instrument_id?: string; + length?: number; + length_timeseries_id?: string; + temp_timeseries_id?: string; + tilt_timeseries_id?: string; + }; + /** + * @example { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + */ + "dto.Measurement": { + annotation?: string; + error?: string; + masked?: boolean; + time?: string; + validated?: boolean; + value?: number; + }; + "dto.MeasurementCollection": { + items?: components["schemas"]["dto.Measurement"][]; + 
timeseries_id?: string; + }; + "dto.Opts": { + [key: string]: unknown; + }; + "dto.PlotConfigBullseyePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigBullseyePlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigBullseyePlotDisplay": { + x_axis_timeseries_id?: string; + y_axis_timeseries_id?: string; + }; + "dto.PlotConfigContourPlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigContourPlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigContourPlotDisplay": { + contour_smoothing?: boolean; + gradient_smoothing?: boolean; + locf_backfill?: string; + show_labels?: boolean; + time?: string; + timeseries_ids?: string[]; + }; + "dto.PlotConfigProfilePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigProfilePlotDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + 
slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigProfilePlotDisplay": { + instrument_id?: string; + instrument_type?: string; + }; + "dto.PlotConfigScatterLineCustomShape": { + color?: string; + data_point?: number; enabled?: boolean; - value?: boolean; + name?: string; + plot_configuration_id?: string; + }; + "dto.PlotConfigScatterLineDisplay": { + layout?: components["schemas"]["dto.PlotConfigScatterLineLayout"]; + traces?: components["schemas"]["dto.PlotConfigScatterLineTimeseriesTrace"][]; + }; + "dto.PlotConfigScatterLineLayout": { + custom_shapes?: components["schemas"]["dto.PlotConfigScatterLineCustomShape"][]; + y2_axis_title?: string; + y_axis_title?: string; + }; + "dto.PlotConfigScatterLinePlot": { + auto_range?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + date_range?: string; + display?: components["schemas"]["dto.PlotConfigScatterLineDisplay"]; + id?: string; + name?: string; + plot_type?: string; + project_id?: string; + report_configs?: components["schemas"]["dto.IDSlugName"][]; + show_comments?: boolean; + show_masked?: boolean; + show_nonvalidated?: boolean; + slug?: string; + threshold?: number; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.PlotConfigScatterLineTimeseriesTrace": { + color?: string; + line_style?: string; + /** @description read-only */ + name?: string; + /** @description read-only */ + parameter?: string; + plot_configuration_id?: string; + show_markers?: boolean; + timeseries_id?: string; + trace_order?: number; + trace_type?: string; + width?: number; + /** @description y1 or y2, default y1 */ + y_axis?: string; }; /** * @example { - * "token_id": "token_id", - * "profile_id": "profile_id", - * "issued": "issued", - * "secret_token": "secret_token" + * "image": "image", + * "federal_id": "federal_id", + * "created_at": "created_at", + * "created_by": 
"created_by", + * "instrument_count": 0, + * "office_id": "office_id", + * "updated_by_username": "updated_by_username", + * "instrument_group_count": 6, + * "name": "name", + * "updated_by": "updated_by", + * "updatedd_at": "updatedd_at", + * "created_by_username": "created_by_username", + * "district_id": "district_id", + * "id": "id", + * "slug": "slug" * } */ - Token: { - issued?: string; - profile_id?: string; - secret_token?: string; - token_id?: string; + "dto.Project": { + created_at?: string; + created_by?: string; + created_by_username?: string; + district_id?: string; + federal_id?: string; + id?: string; + image?: string; + instrument_count?: number; + instrument_group_count?: number; + name?: string; + office_id?: string; + slug?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.ProjectInstrumentAssignments": { + instrument_ids?: string[]; + }; + "dto.ReportConfig": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + district_name?: string; + global_overrides?: components["schemas"]["dto.ReportConfigGlobalOverrides"]; + id?: string; + name?: string; + plot_configs?: components["schemas"]["dto.IDSlugName"][]; + project_id?: string; + project_name?: string; + slug?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + "dto.ReportConfigGlobalOverrides": { + date_range?: components["schemas"]["dto.TextOption"]; + show_masked?: components["schemas"]["dto.ToggleOption"]; + show_nonvalidated?: components["schemas"]["dto.ToggleOption"]; + }; + "dto.ReportDownloadJob": { + created_at?: string; + created_by?: string; + file_expiry?: string; + file_key?: string; + id?: string; + progress?: number; + progress_updated_at?: string; + report_config_id?: string; + status?: string; }; /** * @example { - * "token_id": "token_id", - * "issued": "issued" + * "z_timeseries_id": "z_timeseries_id", + * "temp_timeseries_id": 
"temp_timeseries_id", + * "y_timeseries_id": "y_timeseries_id", + * "x_timeseries_id": "x_timeseries_id", + * "length": 6.027456183070403, + * "id": 0, + * "instrument_id": "instrument_id", + * "length_timeseries_id": "length_timeseries_id" * } */ - TokenInfoProfile: { - issued?: string; - token_id?: string; + "dto.SaaSegment": { + id?: number; + instrument_id?: string; + length?: number; + length_timeseries_id?: string; + temp_timeseries_id?: string; + x_timeseries_id?: string; + y_timeseries_id?: string; + z_timeseries_id?: string; + }; + "dto.TextOption": { + enabled?: boolean; + value?: string; }; /** * @example { - * "measure": "measure", - * "unit_family_id": "unit_family_id", + * "values": [ + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * }, + * { + * "annotation": "annotation", + * "validated": true, + * "masked": true, + * "time": "time", + * "error": "error", + * "value": 0.8008281904610115 + * } + * ], + * "instrument": "instrument", + * "type": "type", + * "instrument_id": "instrument_id", + * "unit": "unit", + * "parameter": "parameter", * "name": "name", - * "unit_family": "unit_family", + * "variable": "variable", * "id": "id", - * "abbreviation": "abbreviation", - * "measure_id": "measure_id" + * "instrument_slug": "instrument_slug", + * "is_computed": true, + * "unit_id": "unit_id", + * "slug": "slug", + * "parameter_id": "parameter_id" * } */ - Unit: { - abbreviation?: string; + "dto.Timeseries": { id?: string; - measure?: string; - measure_id?: string; + instrument?: string; + instrument_id?: string; + instrument_slug?: string; + is_computed?: boolean; name?: string; - unit_family?: string; - unit_family_id?: string; + parameter?: string; + parameter_id?: string; + slug?: string; + type?: string; + unit?: string; + unit_id?: string; + values?: components["schemas"]["dto.Measurement"][]; + variable?: string; + }; + 
"dto.TimeseriesCollectionItems": { + items?: components["schemas"]["dto.Timeseries"][]; }; /** * @example { + * "cwms_office_id": "cwms_office_id", * "values": [ * { * "annotation": "annotation", @@ -9764,30 +9620,33 @@ export interface components { * } * ], * "instrument": "instrument", + * "cwms_extent_earliest_time": "cwms_extent_earliest_time", * "type": "type", + * "cwms_timeseries_id": "cwms_timeseries_id", * "instrument_id": "instrument_id", * "unit": "unit", * "parameter": "parameter", + * "cwms_extent_latest_time": "cwms_extent_latest_time", * "name": "name", * "variable": "variable", - * "latest_value": 0.8008281904610115, * "id": "id", * "instrument_slug": "instrument_slug", * "is_computed": true, - * "latest_time": "latest_time", * "unit_id": "unit_id", * "slug": "slug", * "parameter_id": "parameter_id" * } */ - collectionGroupDetailsTimeseries: { + "dto.TimeseriesCwms": { + cwms_extent_earliest_time?: string; + cwms_extent_latest_time?: string; + cwms_office_id?: string; + cwms_timeseries_id?: string; id?: string; instrument?: string; instrument_id?: string; instrument_slug?: string; is_computed?: boolean; - latest_time?: string; - latest_value?: number; name?: string; parameter?: string; parameter_id?: string; @@ -9795,24 +9654,314 @@ export interface components { type?: string; unit?: string; unit_id?: string; - values?: components["schemas"]["Measurement"][]; + values?: components["schemas"]["dto.Measurement"][]; variable?: string; }; + "dto.TimeseriesMeasurementCollectionCollection": { + items?: components["schemas"]["dto.MeasurementCollection"][]; + }; + "dto.ToggleOption": { + enabled?: boolean; + value?: boolean; + }; + "dto.UploaderConfig": { + column_offset?: number; + comment_field?: string; + comment_field_enabled?: boolean; + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + id?: string; + masked_field?: string; + masked_field_enabled?: boolean; + name?: string; + project_id?: string; + 
row_offset?: number; + slug?: string; + time_field?: string; + type?: components["schemas"]["dto.UploaderConfigType"]; + tz_name?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + validated_field?: string; + validated_field_enabled?: boolean; + }; /** * @example { - * "bytes": [ - * 0, - * 0 + * "timeseries_id": "timeseries_id", + * "field_name": "field_name" + * } + */ + "dto.UploaderConfigMapping": { + field_name?: string; + timeseries_id?: string; + }; + /** @enum {string} */ + "dto.UploaderConfigType": "csv" | "dux" | "toa5"; + /** + * @example { + * "x": [ + * 0.8008281904610115, + * 0.8008281904610115 + * ], + * "y": [ + * 6.027456183070403, + * 6.027456183070403 + * ], + * "z": [ + * 1.4658129805029452, + * 1.4658129805029452 + * ] + * } + */ + "service.AggregatePlotConfigMeasurementsContourPlot": { + x?: number[]; + y?: number[]; + z?: number[]; + }; + /** + * @example { + * "aware_parameters": { + * "key": "aware_parameters" + * }, + * "instrument_id": "instrument_id", + * "aware_id": "aware_id" + * } + */ + "service.AwarePlatformParameterConfig": { + aware_id?: string; + aware_parameters?: { + [key: string]: string; + }; + instrument_id?: string; + }; + /** + * @example { + * "created_at": "created_at", + * "model_id": "model_id", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "tables": [ + * { + * "id": "id", + * "table_name": "table_name" + * }, + * { + * "id": "id", + * "table_name": "table_name" + * } + * ], + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "model": "model", + * "id": "id", + * "sn": "sn", + * "errors": [ + * "errors", + * "errors" + * ], + * "key": "key", + * "slug": "slug" + * } + */ + "service.DataloggerWithKey": { + created_at?: string; + created_by?: string; + created_by_username?: string; + errors?: string[]; + id?: string; + key?: 
string; + model?: string; + model_id?: string; + name?: string; + project_id?: string; + slug?: string; + sn?: string; + tables?: components["schemas"]["db.DataloggerTableIDName"][]; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + }; + "service.DomainMap": { + [key: string]: components["schemas"]["db.DomainGroupOpt"][]; + }; + /** + * @example { + * "status": "status" + * } + */ + "service.Healthcheck": { + status?: string; + }; + /** + * @example { + * "time": "time" + * } + */ + "service.Heartbeat": { + time?: string; + }; + /** + * @example { + * "is_valid": true, + * "errors": [ + * "errors", + * "errors" + * ] + * } + */ + "service.InstrumentsValidation": { + errors?: string[]; + is_valid?: boolean; + }; + /** + * @example { + * "project_count": 0 + * } + */ + "service.ProjectCount": { + project_count?: number; + }; + /** + * @example { + * "global_overrides": { + * "date_range": { + * "value": "value", + * "enabled": true + * }, + * "show_nonvalidated": { + * "value": true, + * "enabled": true + * }, + * "show_masked": { + * "value": true, + * "enabled": true + * } + * }, + * "created_at": "created_at", + * "description": "description", + * "project_name": "project_name", + * "created_by": "created_by", + * "updated_by_username": "updated_by_username", + * "district_name": "district_name", + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_by_username": "created_by_username", + * "id": "id", + * "plot_configs": [ + * { + * "date_range": "date_range", + * "display": "{}", + * "show_comments": true, + * "report_configs": [ + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * }, + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + * ], + * "created_at": "created_at", + * "auto_range": true, + * "show_masked": true, + * "threshold": 0, + * "created_by": "created_by", + * "show_nonvalidated": true, + * "updated_at": 
"updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "id": "id", + * "plot_type": "scatter-line", + * "slug": "slug" + * }, + * { + * "date_range": "date_range", + * "display": "{}", + * "show_comments": true, + * "report_configs": [ + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * }, + * { + * "name": "name", + * "id": "id", + * "slug": "slug" + * } + * ], + * "created_at": "created_at", + * "auto_range": true, + * "show_masked": true, + * "threshold": 0, + * "created_by": "created_by", + * "show_nonvalidated": true, + * "updated_at": "updated_at", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "id": "id", + * "plot_type": "scatter-line", + * "slug": "slug" + * } * ], - * "status": 6 + * "slug": "slug" + * } + */ + "service.ReportConfigWithPlotConfigs": { + created_at?: string; + created_by?: string; + created_by_username?: string; + description?: string; + district_name?: string; + global_overrides?: components["schemas"]["db.ReportConfigGlobalOverrides"]; + id?: string; + name?: string; + plot_configs?: components["schemas"]["db.VPlotConfiguration"][]; + project_id?: string; + project_name?: string; + slug?: string; + updated_at?: string; + updated_by?: string; + updated_by_username?: string; + }; + /** + * @example { + * "token_id": "token_id", + * "profile_id": "profile_id", + * "id": "id", + * "issued": "issued", + * "hash": "hash", + * "secret_token": "secret_token" * } */ - "pgtype.JSON": { - bytes?: number[]; - status?: components["schemas"]["pgtype.Status"]; + "service.Token": { + hash?: string; + id?: string; + issued?: string; + profile_id?: string; + secret_token?: string; + token_id?: string; + }; + _timeseries_measurements_post_request: { + /** + * Format: binary + * @description TOA5 file of timeseries measurement collections + */ + timeseries_measurement_collections?: string; }; - /** @enum {integer} */ - "pgtype.Status": 0 | 1 | 2; }; 
responses: never; parameters: never; diff --git a/sqlc.yml b/sqlc.yml new file mode 100644 index 00000000..41618cfb --- /dev/null +++ b/sqlc.yml @@ -0,0 +1,154 @@ +version: "2" +sql: + - engine: "postgresql" + queries: "api/queries/*.sql" + schema: + - "api/migrations/schema/*.sql" + - "api/migrations/repeat/*.sql" + gen: + go: + package: "db" + out: "api/internal/db" + sql_package: "pgx/v5" + output_files_suffix: "_gen" + emit_json_tags: true + emit_interface: true + emit_empty_slices: true + emit_exact_table_names: true + emit_pointers_for_null_types: true + overrides: + # uuid + - db_type: uuid + go_type: github.com/google/uuid.UUID + - db_type: uuid + nullable: true + go_type: + type: uuid.UUID + pointer: true + + # timestamptz + - db_type: timestamptz + go_type: time.Time + - db_type: timestamptz + nullable: true + go_type: + type: time.Time + pointer: true + + # interval + - db_type: pg_catalog.interval + go_type: string + - db_type: pg_catalog.interval + nullable: true + go_type: + type: string + pointer: true + + # v_alert + - column: v_alert.instruments + go_type: + type: InstrumentIDName + slice: true + + # v_alert_check_measurement_submittal + - column: v_alert_check_measurement_submittal.affected_timeseries + go_type: + type: AlertCheckMeasurementSubmittalAffectedTimeseries + slice: true + - column: v_alert_check_evaluation_submittal.submittal + nullable: true + go_type: + type: VSubmittal + pointer: true + - column: v_alert_check_measurement_submittal.submittal + nullable: true + go_type: + type: VSubmittal + pointer: true + + # v_alert_config + - column: v_alert_config.instruments + go_type: + type: InstrumentIDName + slice: true + - column: v_alert_config.alert_email_subscriptions + go_type: + type: EmailAutocompleteResult + slice: true + + # v_collection_group_details + - column: v_collection_group_details.timeseries + go_type: + type: CollectionGroupDetailsTimeseries + slice: true + + # v_datalogger + - column: v_datalogger.tables + go_type: + 
type: DataloggerTableIDName + slice: true + + # v_datalogger_equivalency_table + - column: v_datalogger_equivalency_table.fields + go_type: + type: DataloggerEquivalencyTableField + slice: true + - column: v_domain_group.opts + go_type: + type: DomainGroupOpt + slice: true + + # v_datalogger_preview + - column: v_datalogger_preview.preview + go_type: encoding/json.RawMessage + + # v_evaluation + - column: v_evaluation.instruments + go_type: + type: InstrumentIDName + slice: true + + # v_instrument + - column: v_instrument.geometry + go_type: encoding/json.RawMessage + - column: v_instrument.projects + go_type: + type: IDSlugName + slice: true + - column: v_instrument.telemetry + go_type: + type: IDSlugName + slice: true + + # v_ipi_measurement + - column: v_ipi_measurement.measurements + go_type: + type: IpiMeasurement + slice: true + + # v_plot_configuration + - column: v_plot_configuration.report_configs + go_type: + type: IDSlugName + slice: true + + # v_profile + - column: v_profile.tokens + go_type: + type: VProfileToken + slice: true + + # v_saa_measurement + - column: v_saa_measurement.measurements + go_type: + type: SaaMeasurement + slice: true + + # v_report_config + - column: v_report_config.plot_configs + go_type: + type: IDSlugName + slice: true + - column: v_report_config.global_overrides + go_type: + type: ReportConfigGlobalOverrides