diff --git a/api/Dockerfile b/api/Dockerfile
index eb9f3aad..357e3c99 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -22,6 +22,8 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
 FROM ${BASE_IMAGE}
 ARG BUILD_TAG
 ENV BUILD_TAG=${BUILD_TAG}
+ENV ZONEINFO=/usr/share/zoneinfo
+COPY --from=builder /usr/share/zoneinfo /usr/share/zoneinfo
 COPY --from=builder /go/bin/app /go/bin/app
 COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
 ENTRYPOINT ["/go/bin/app"]
diff --git a/api/go.mod b/api/go.mod
index 917c7209..99ec8766 100644
--- a/api/go.mod
+++ b/api/go.mod
@@ -6,6 +6,7 @@ toolchain go1.23.0
 
 require (
 	github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible
+	github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
 	github.com/aws/aws-lambda-go v1.47.0
 	github.com/aws/aws-sdk-go-v2 v1.30.5
 	github.com/aws/aws-sdk-go-v2/config v1.27.33
@@ -26,8 +27,10 @@ require (
 	github.com/stretchr/testify v1.9.0
 	github.com/tidwall/btree v1.7.0
 	github.com/xeipuuv/gojsonschema v1.2.0
-	golang.org/x/crypto v0.27.0
+	github.com/xuri/excelize/v2 v2.9.0
+	golang.org/x/crypto v0.28.0
 	golang.org/x/image v0.20.0
+	golang.org/x/text v0.19.0
 )
 
 require (
@@ -57,16 +60,20 @@ require (
 	github.com/labstack/gommon v0.4.2 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/richardlehane/mscfb v1.0.4 // indirect
+	github.com/richardlehane/msoleps v1.0.4 // indirect
 	github.com/rogpeppe/go-internal v1.11.0 // indirect
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
 	github.com/valyala/fasttemplate v1.2.2 // indirect
 	github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
 	github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
-	golang.org/x/net v0.29.0 // indirect
+	github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d // indirect
+	github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7 // indirect
+	golang.org/x/net v0.30.0 // indirect
 	golang.org/x/sync v0.8.0 // indirect
-	golang.org/x/sys v0.25.0 // indirect
-	golang.org/x/text v0.18.0 // indirect
+	golang.org/x/sys v0.26.0 // indirect
 	golang.org/x/time v0.6.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/api/go.sum b/api/go.sum
index 77f8e251..b7152e5b 100644
--- a/api/go.sum
+++ b/api/go.sum
@@ -4,6 +4,8 @@ github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible h1
 github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0=
 github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
 github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
+github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
+github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
 github.com/aws/aws-lambda-go v1.47.0 h1:0H8s0vumYx/YKs4sE7YM0ktwL2eWse+kfopsRI1sXVI=
 github.com/aws/aws-lambda-go v1.47.0/go.mod h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A=
 github.com/aws/aws-sdk-go-v2 v1.30.5 h1:mWSRTwQAb0aLE17dSzztCVJWI9+cRMgqebndjwDyK0g=
@@ -178,7 +180,10 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
 github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
 github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
 github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
 github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8=
 github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
@@ -186,6 +191,12 @@ github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsK
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM=
+github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk=
+github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
+github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM/9/g00=
+github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
+github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
 github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
@@ -194,6 +205,7 @@ github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
 github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
 github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
 github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
+github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
 github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
 github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
 github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
@@ -225,6 +237,12 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
 github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
 github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
 github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d h1:llb0neMWDQe87IzJLS4Ci7psK/lVsjIS2otl+1WyRyY=
+github.com/xuri/efp v0.0.0-20240408161823-9ad904a10d6d/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI=
+github.com/xuri/excelize/v2 v2.9.0 h1:1tgOaEq92IOEumR1/JfYS/eR0KHOCsRv/rYXXh6YJQE=
+github.com/xuri/excelize/v2 v2.9.0/go.mod h1:uqey4QBZ9gdMeWApPLdhm9x+9o2lq4iVmjiLfBS5hdE=
+github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7 h1:hPVCafDV85blFTabnqKgNhDCkJX25eik94Si9cTER4A=
+github.com/xuri/nfp v0.0.0-20240318013403-ab9948c2c4a7/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
 go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -252,8 +270,8 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
 golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
-golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
-golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
+golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
+golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
 golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
 golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
 golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
@@ -271,8 +289,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
-golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
-golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
+golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4=
+golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -299,8 +317,8 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
-golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
+golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -316,8 +334,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
-golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
+golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
 golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
 golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/api/internal/config/api.go b/api/internal/config/api.go
index c50d9a5b..0c92e527 100644
--- a/api/internal/config/api.go
+++ b/api/internal/config/api.go
@@ -3,6 +3,8 @@ package config
 import (
 	"log"
 	"os"
+
+	"github.com/USACE/instrumentation-api/api/internal/tz"
 )
 
 // Config stores configuration information stored in environment variables
@@ -34,5 +36,11 @@ func NewApiConfig() *ApiConfig {
 	default:
 	}
 
+	zz, err := tz.ListTimezones("/usr/share/zoneinfo/")
+	if err != nil {
+		log.Fatal(err)
+	}
+	cfg.AvailableTimezones = zz
+
 	return &cfg
 }
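NewApiConfig now builds AvailableTimezones by scanning the zoneinfo tree that the Dockerfile change copies into the runtime image (and exposes via ZONEINFO), replacing the pg_timezone_names query removed further down. The internal tz package itself is not part of this diff; a plausible sketch of ListTimezones under that assumption:

// Hedged sketch only: the real api/internal/tz package is not shown in this PR.
package tz

import (
	"io/fs"
	"path/filepath"
)

// ListTimezones walks a zoneinfo directory (e.g. /usr/share/zoneinfo/) and
// returns the IANA zone names found there.
func ListTimezones(root string) ([]string, error) {
	names := []string{}
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() {
			return nil
		}
		rel, err := filepath.Rel(root, path)
		if err != nil {
			return err
		}
		// Zone names begin with an uppercase letter (America/Chicago, UTC);
		// this skips metadata files such as zone.tab, leapseconds, posixrules.
		if rel == "" || rel[0] < 'A' || rel[0] > 'Z' {
			return nil
		}
		names = append(names, filepath.ToSlash(rel))
		return nil
	})
	return names, err
}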
diff --git a/api/internal/config/server.go b/api/internal/config/server.go
index e5bf6087..7b012ee2 100644
--- a/api/internal/config/server.go
+++ b/api/internal/config/server.go
@@ -12,4 +12,5 @@ type ServerConfig struct {
 	RequestLoggerEnabled bool   `env:"REQUEST_LOGGER_ENABLED"`
 	RoutePrefix          string `env:"ROUTE_PREFIX"`
 	ServerBaseUrl        string `env:"SERVER_BASE_URL"`
+	AvailableTimezones   []string
 }
diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go
index 77d8026a..69cbf423 100644
--- a/api/internal/db/batch.go
+++ b/api/internal/db/batch.go
@@ -12,7 +12,6 @@ import (
 	"github.com/google/uuid"
 	"github.com/jackc/pgx/v5"
-	"github.com/jackc/pgx/v5/pgtype"
 )
 
 var (
@@ -167,6 +166,127 @@ func (b *DataloggerErrorCreateBatchBatchResults) Close() error {
 	return b.br.Close()
 }
 
+const equivalencyTableCreateOrUpdateBatch = `-- name: EquivalencyTableCreateOrUpdateBatch :batchexec
+insert into datalogger_equivalency_table
+(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id)
+values ($1, $2, $3, $4, $5, $6)
+on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key
+do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id
+`
+
+type EquivalencyTableCreateOrUpdateBatchBatchResults struct {
+	br     pgx.BatchResults
+	tot    int
+	closed bool
+}
+
+type EquivalencyTableCreateOrUpdateBatchParams struct {
+	DataloggerID      uuid.UUID  `json:"datalogger_id"`
+	DataloggerTableID *uuid.UUID `json:"datalogger_table_id"`
+	FieldName         string     `json:"field_name"`
+	DisplayName       *string    `json:"display_name"`
+	InstrumentID      *uuid.UUID `json:"instrument_id"`
+	TimeseriesID      *uuid.UUID `json:"timeseries_id"`
+}
+
+func (q *Queries) EquivalencyTableCreateOrUpdateBatch(ctx context.Context, arg []EquivalencyTableCreateOrUpdateBatchParams) *EquivalencyTableCreateOrUpdateBatchBatchResults {
+	batch := &pgx.Batch{}
+	for _, a := range arg {
+		vals := []interface{}{
+			a.DataloggerID,
+			a.DataloggerTableID,
+			a.FieldName,
+			a.DisplayName,
+			a.InstrumentID,
+			a.TimeseriesID,
+		}
+		batch.Queue(equivalencyTableCreateOrUpdateBatch, vals...)
+	}
+	br := q.db.SendBatch(ctx, batch)
+	return &EquivalencyTableCreateOrUpdateBatchBatchResults{br, len(arg), false}
+}
+
+func (b *EquivalencyTableCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) {
+	defer b.br.Close()
+	for t := 0; t < b.tot; t++ {
+		if b.closed {
+			if f != nil {
+				f(t, ErrBatchAlreadyClosed)
+			}
+			continue
+		}
+		_, err := b.br.Exec()
+		if f != nil {
+			f(t, err)
+		}
+	}
+}
+
+func (b *EquivalencyTableCreateOrUpdateBatchBatchResults) Close() error {
+	b.closed = true
+	return b.br.Close()
+}
+
+const equivalencyTableUpdateBatch = `-- name: EquivalencyTableUpdateBatch :batchexec
+update datalogger_equivalency_table set
+	field_name = $2,
+	display_name = $3,
+	instrument_id = $4,
+	timeseries_id = $5
+where id = $1
+`
+
+type EquivalencyTableUpdateBatchBatchResults struct {
+	br     pgx.BatchResults
+	tot    int
+	closed bool
+}
+
+type EquivalencyTableUpdateBatchParams struct {
+	ID           uuid.UUID  `json:"id"`
+	FieldName    string     `json:"field_name"`
+	DisplayName  *string    `json:"display_name"`
+	InstrumentID *uuid.UUID `json:"instrument_id"`
+	TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+func (q *Queries) EquivalencyTableUpdateBatch(ctx context.Context, arg []EquivalencyTableUpdateBatchParams) *EquivalencyTableUpdateBatchBatchResults {
+	batch := &pgx.Batch{}
+	for _, a := range arg {
+		vals := []interface{}{
+			a.ID,
+			a.FieldName,
+			a.DisplayName,
+			a.InstrumentID,
+			a.TimeseriesID,
+		}
+		batch.Queue(equivalencyTableUpdateBatch, vals...)
+	}
+	br := q.db.SendBatch(ctx, batch)
+	return &EquivalencyTableUpdateBatchBatchResults{br, len(arg), false}
+}
+
+func (b *EquivalencyTableUpdateBatchBatchResults) Exec(f func(int, error)) {
+	defer b.br.Close()
+	for t := 0; t < b.tot; t++ {
+		if b.closed {
+			if f != nil {
+				f(t, ErrBatchAlreadyClosed)
+			}
+			continue
+		}
+		_, err := b.br.Exec()
+		if f != nil {
+			f(t, err)
+		}
+	}
+}
+
+func (b *EquivalencyTableUpdateBatchBatchResults) Close() error {
+	b.closed = true
+	return b.br.Close()
+}
+
 const evaluationInstrumentCreateBatch = `-- name: EvaluationInstrumentCreateBatch :batchexec
 insert into evaluation_instrument (evaluation_id, instrument_id) values ($1,$2)
 `
@@ -456,55 +576,6 @@ func (b *InclSegmentUpdateBatchBatchResults) Close() error {
 	return b.br.Close()
 }
 
-const instrumentConstantCreateBatch = `-- name: InstrumentConstantCreateBatch :batchexec
-insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2)
-`
-
-type InstrumentConstantCreateBatchBatchResults struct {
-	br     pgx.BatchResults
-	tot    int
-	closed bool
-}
-
-type InstrumentConstantCreateBatchParams struct {
-	InstrumentID uuid.UUID `json:"instrument_id"`
-	TimeseriesID uuid.UUID `json:"timeseries_id"`
-}
-
-func (q *Queries) InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults {
-	batch := &pgx.Batch{}
-	for _, a := range arg {
-		vals := []interface{}{
-			a.InstrumentID,
-			a.TimeseriesID,
-		}
-		batch.Queue(instrumentConstantCreateBatch, vals...)
-	}
-	br := q.db.SendBatch(ctx, batch)
-	return &InstrumentConstantCreateBatchBatchResults{br, len(arg), false}
-}
-
-func (b *InstrumentConstantCreateBatchBatchResults) Exec(f func(int, error)) {
-	defer b.br.Close()
-	for t := 0; t < b.tot; t++ {
-		if b.closed {
-			if f != nil {
-				f(t, ErrBatchAlreadyClosed)
-			}
-			continue
-		}
-		_, err := b.br.Exec()
-		if f != nil {
-			f(t, err)
-		}
-	}
-}
-
-func (b *InstrumentConstantCreateBatchBatchResults) Close() error {
-	b.closed = true
-	return b.br.Close()
-}
-
 const instrumentCreateBatch = `-- name: InstrumentCreateBatch :batchone
 insert into instrument (slug, name, type_id, geometry, station, station_offset, created_by, created_at, nid_id, usgs_id, show_cwms_tab)
 values (
@@ -1699,6 +1770,112 @@ func (b *SaaSegmentUpdateBatchBatchResults) Close() error {
 	return b.br.Close()
 }
 
+const survey123EquivalencyTableRowCreateOrUpdateBatch = `-- name: Survey123EquivalencyTableRowCreateOrUpdateBatch :batchexec
+insert into survey123_equivalency_table (survey123_id, field_name, display_name, instrument_id, timeseries_id) values ($1, $2, $3, $4, $5)
+on conflict on constraint survey123_equivalency_table_survey123_id_survey123_deleted_field_name_key do update set
+display_name=excluded.display_name, instrument_id=excluded.instrument_id, timeseries_id=excluded.timeseries_id
+`
+
+type Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults struct {
+	br     pgx.BatchResults
+	tot    int
+	closed bool
+}
+
+type Survey123EquivalencyTableRowCreateOrUpdateBatchParams struct {
+	Survey123ID  uuid.UUID  `json:"survey123_id"`
+	FieldName    string     `json:"field_name"`
+	DisplayName  *string    `json:"display_name"`
+	InstrumentID *uuid.UUID `json:"instrument_id"`
+	TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
+func (q *Queries) Survey123EquivalencyTableRowCreateOrUpdateBatch(ctx context.Context, arg []Survey123EquivalencyTableRowCreateOrUpdateBatchParams) *Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults {
+	batch := &pgx.Batch{}
+	for _, a := range arg {
+		vals := []interface{}{
+			a.Survey123ID,
+			a.FieldName,
+			a.DisplayName,
+			a.InstrumentID,
+			a.TimeseriesID,
+		}
+		batch.Queue(survey123EquivalencyTableRowCreateOrUpdateBatch, vals...)
+	}
+	br := q.db.SendBatch(ctx, batch)
+	return &Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults{br, len(arg), false}
+}
+
+func (b *Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults) Exec(f func(int, error)) {
+	defer b.br.Close()
+	for t := 0; t < b.tot; t++ {
+		if b.closed {
+			if f != nil {
+				f(t, ErrBatchAlreadyClosed)
+			}
+			continue
+		}
+		_, err := b.br.Exec()
+		if f != nil {
+			f(t, err)
+		}
+	}
+}
+
+func (b *Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults) Close() error {
+	b.closed = true
+	return b.br.Close()
+}
+
+const survey123PayloadErrorCreateBatch = `-- name: Survey123PayloadErrorCreateBatch :batchexec
+insert into survey123_payload_error (survey123_id, error_message) values ($1, $2)
+`
+
+type Survey123PayloadErrorCreateBatchBatchResults struct {
+	br     pgx.BatchResults
+	tot    int
+	closed bool
+}
+
+type Survey123PayloadErrorCreateBatchParams struct {
+	Survey123ID  uuid.UUID `json:"survey123_id"`
+	ErrorMessage *string   `json:"error_message"`
+}
+
+func (q *Queries) Survey123PayloadErrorCreateBatch(ctx context.Context, arg []Survey123PayloadErrorCreateBatchParams) *Survey123PayloadErrorCreateBatchBatchResults {
+	batch := &pgx.Batch{}
+	for _, a := range arg {
+		vals := []interface{}{
+			a.Survey123ID,
+			a.ErrorMessage,
+		}
+		batch.Queue(survey123PayloadErrorCreateBatch, vals...)
+	}
+	br := q.db.SendBatch(ctx, batch)
+	return &Survey123PayloadErrorCreateBatchBatchResults{br, len(arg), false}
+}
+
+func (b *Survey123PayloadErrorCreateBatchBatchResults) Exec(f func(int, error)) {
+	defer b.br.Close()
+	for t := 0; t < b.tot; t++ {
+		if b.closed {
+			if f != nil {
+				f(t, ErrBatchAlreadyClosed)
+			}
+			continue
+		}
+		_, err := b.br.Exec()
+		if f != nil {
+			f(t, err)
+		}
+	}
+}
+
+func (b *Survey123PayloadErrorCreateBatchBatchResults) Close() error {
+	b.closed = true
+	return b.br.Close()
+}
+
 const timeseriesCreateBatch = `-- name: TimeseriesCreateBatch :batchone
 insert into timeseries (instrument_id, slug, name, parameter_id, unit_id, type)
 values ($1, slugify($2, 'timeseries'), $2, $3, $4, $5)
@@ -1884,61 +2061,6 @@ func (b *TimeseriesMeasurementCreateBatchBatchResults) Close() error {
 	return b.br.Close()
 }
 
-const timeseriesMeasurementCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec
-insert into timeseries_measurement (timeseries_id, time, value)
-values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4)
-on conflict on constraint timeseries_unique_time do update set value = excluded.value
-`
-
-type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults struct {
-	br     pgx.BatchResults
-	tot    int
-	closed bool
-}
-
-type TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams struct {
-	TimeseriesID uuid.UUID        `json:"timeseries_id"`
-	Timezone     string           `json:"timezone"`
-	LocalTime    pgtype.Timestamp `json:"local_time"`
-	Value        float64          `json:"value"`
-}
-
-func (q *Queries) TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults {
-	batch := &pgx.Batch{}
-	for _, a := range arg {
-		vals := []interface{}{
-			a.TimeseriesID,
-			a.Timezone,
-			a.LocalTime,
-			a.Value,
-		}
-		batch.Queue(timeseriesMeasurementCreateOrUpdateAtTimezoneBatch, vals...)
-	}
-	br := q.db.SendBatch(ctx, batch)
-	return &TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false}
-}
-
-func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) {
-	defer b.br.Close()
-	for t := 0; t < b.tot; t++ {
-		if b.closed {
-			if f != nil {
-				f(t, ErrBatchAlreadyClosed)
-			}
-			continue
-		}
-		_, err := b.br.Exec()
-		if f != nil {
-			f(t, err)
-		}
-	}
-}
-
-func (b *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults) Close() error {
-	b.closed = true
-	return b.br.Close()
-}
-
 const timeseriesMeasurementCreateOrUpdateBatch = `-- name: TimeseriesMeasurementCreateOrUpdateBatch :batchexec
 insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, $3)
 on conflict on constraint timeseries_unique_time do update set value = excluded.value
@@ -2147,65 +2269,6 @@ func (b *TimeseriesNoteCreateBatchBatchResults) Close() error {
 	return b.br.Close()
 }
 
-const timeseriesNoteCreateOrUpdateAtTimezoneBatch = `-- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec
-insert into timeseries_notes (timeseries_id, time, masked, validated, annotation)
-values ($1, (($3::timestamp at time zone $2::text) at time zone 'UTC')::timestamptz, $4, $5, $6)
-on conflict on constraint notes_unique_time do nothing
-`
-
-type TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults struct {
-	br     pgx.BatchResults
-	tot    int
-	closed bool
-}
-
-type TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams struct {
-	TimeseriesID uuid.UUID        `json:"timeseries_id"`
-	Timezone     string           `json:"timezone"`
-	LocalTime    pgtype.Timestamp `json:"local_time"`
-	Masked       *bool            `json:"masked"`
-	Validated    *bool            `json:"validated"`
-	Annotation   *string          `json:"annotation"`
-}
-
-func (q *Queries) TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults {
-	batch := &pgx.Batch{}
-	for _, a := range arg {
-		vals := []interface{}{
-			a.TimeseriesID,
-			a.Timezone,
-			a.LocalTime,
-			a.Masked,
-			a.Validated,
-			a.Annotation,
-		}
-		batch.Queue(timeseriesNoteCreateOrUpdateAtTimezoneBatch, vals...)
-	}
-	br := q.db.SendBatch(ctx, batch)
-	return &TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults{br, len(arg), false}
-}
-
-func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Exec(f func(int, error)) {
-	defer b.br.Close()
-	for t := 0; t < b.tot; t++ {
-		if b.closed {
-			if f != nil {
-				f(t, ErrBatchAlreadyClosed)
-			}
-			continue
-		}
-		_, err := b.br.Exec()
-		if f != nil {
-			f(t, err)
-		}
-	}
-}
-
-func (b *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults) Close() error {
-	b.closed = true
-	return b.br.Close()
-}
-
 const timeseriesNoteCreateOrUpdateBatch = `-- name: TimeseriesNoteCreateOrUpdateBatch :batchexec
 insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5)
 on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation
@@ -2363,7 +2426,8 @@ func (b *TimeseriesNoteDeleteRangeBatchBatchResults) Close() error {
 }
 
 const uploaderConfigMappingCreateBatch = `-- name: UploaderConfigMappingCreateBatch :batchexec
-insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3)
+insert into uploader_config_mapping (uploader_config_id, field_name, instrument_field_name, timeseries_id) values
+($1, $2, $3, $4)
 `
@@ -2373,9 +2437,10 @@ type UploaderConfigMappingCreateBatchBatchResults struct {
 }
 
 type UploaderConfigMappingCreateBatchParams struct {
-	UploaderConfigID uuid.UUID  `json:"uploader_config_id"`
-	FieldName        string     `json:"field_name"`
-	TimeseriesID     *uuid.UUID `json:"timeseries_id"`
+	UploaderConfigID    uuid.UUID  `json:"uploader_config_id"`
+	FieldName           string     `json:"field_name"`
+	InstrumentFieldName *string    `json:"instrument_field_name"`
+	TimeseriesID        *uuid.UUID `json:"timeseries_id"`
 }
 
 func (q *Queries) UploaderConfigMappingCreateBatch(ctx context.Context, arg []UploaderConfigMappingCreateBatchParams) *UploaderConfigMappingCreateBatchBatchResults {
@@ -2384,6 +2449,7 @@ func (q *Queries) UploaderConfigMappingCreateBatch(ctx context.Context, arg []Up
 		vals := []interface{}{
 			a.UploaderConfigID,
 			a.FieldName,
+			a.InstrumentFieldName,
 			a.TimeseriesID,
 		}
 		batch.Queue(uploaderConfigMappingCreateBatch, vals...)
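All of the generated *BatchResults types above share the same contract: Exec invokes the callback once per queued row, in queue order, and defers br.Close() itself. A hedged usage sketch (q, ctx, rows, and the first-error policy are illustrative caller-side choices, not part of the diff):

// Hedged usage sketch for the generated batch API in batch.go above.
rows := []db.EquivalencyTableCreateOrUpdateBatchParams{ /* one element per upserted row */ }
res := q.EquivalencyTableCreateOrUpdateBatch(ctx, rows)
var firstErr error
res.Exec(func(i int, err error) {
	// i indexes the queued params slice; err is that row's result.
	if err != nil && firstErr == nil {
		firstErr = fmt.Errorf("equivalency table row %d: %w", i, err)
	}
})
// Exec closes the underlying pgx batch via its deferred b.br.Close(), so
// calling Close directly is only needed when abandoning the results early.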
diff --git a/api/internal/db/domain.sql_gen.go b/api/internal/db/domain.sql_gen.go
index 382dce78..319d1aa6 100644
--- a/api/internal/db/domain.sql_gen.go
+++ b/api/internal/db/domain.sql_gen.go
@@ -61,39 +61,3 @@ func (q *Queries) DomainList(ctx context.Context) ([]VDomain, error) {
 	}
 	return items, nil
 }
-
-const pgTimezoneNamesList = `-- name: PgTimezoneNamesList :many
-select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names
-`
-
-type PgTimezoneNamesListRow struct {
-	Name      *string `json:"name"`
-	Abbrev    *string `json:"abbrev"`
-	UtcOffset string  `json:"utc_offset"`
-	IsDst     *bool   `json:"is_dst"`
-}
-
-func (q *Queries) PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error) {
-	rows, err := q.db.Query(ctx, pgTimezoneNamesList)
-	if err != nil {
-		return nil, err
-	}
-	defer rows.Close()
-	items := []PgTimezoneNamesListRow{}
-	for rows.Next() {
-		var i PgTimezoneNamesListRow
-		if err := rows.Scan(
-			&i.Name,
-			&i.Abbrev,
-			&i.UtcOffset,
-			&i.IsDst,
-		); err != nil {
-			return nil, err
-		}
-		items = append(items, i)
-	}
-	if err := rows.Err(); err != nil {
-		return nil, err
-	}
-	return items, nil
-}
diff --git a/api/internal/db/equivalency_table.sql_gen.go b/api/internal/db/equivalency_table.sql_gen.go
index 7f74b979..8297b2ce 100644
--- a/api/internal/db/equivalency_table.sql_gen.go
+++ b/api/internal/db/equivalency_table.sql_gen.go
@@ -22,35 +22,6 @@ func (q *Queries) DataloggerTableGetIsValid(ctx context.Context, id uuid.UUID) (
 	return not_exists, err
 }
 
-const equivalencyTableCreateOrUpdate = `-- name: EquivalencyTableCreateOrUpdate :exec
-insert into datalogger_equivalency_table
-(datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id)
-values ($1, $2, $3, $4, $5, $6)
-on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field_name_key
-do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id
-`
-
-type EquivalencyTableCreateOrUpdateParams struct {
-	DataloggerID      uuid.UUID  `json:"datalogger_id"`
-	DataloggerTableID *uuid.UUID `json:"datalogger_table_id"`
-	FieldName         string     `json:"field_name"`
-	DisplayName       *string    `json:"display_name"`
-	InstrumentID      *uuid.UUID `json:"instrument_id"`
-	TimeseriesID      *uuid.UUID `json:"timeseries_id"`
-}
-
-func (q *Queries) EquivalencyTableCreateOrUpdate(ctx context.Context, arg EquivalencyTableCreateOrUpdateParams) error {
-	_, err := q.db.Exec(ctx, equivalencyTableCreateOrUpdate,
-		arg.DataloggerID,
-		arg.DataloggerTableID,
-		arg.FieldName,
-		arg.DisplayName,
-		arg.InstrumentID,
-		arg.TimeseriesID,
-	)
-	return err
-}
-
 const equivalencyTableDelete = `-- name: EquivalencyTableDelete :exec
 delete from datalogger_equivalency_table where id = $1
 `
@@ -90,48 +61,3 @@ func (q *Queries) EquivalencyTableGet(ctx context.Context, dataloggerTableID uui
 	)
 	return i, err
 }
-
-const equivalencyTableTimeseriesGetIsValid = `-- name: EquivalencyTableTimeseriesGetIsValid :one
-select not exists (
-	select id from v_timeseries_computed
-	where id = $1
-	union all
-	select timeseries_id from instrument_constants
-	where timeseries_id = $1
-)
-`
-
-func (q *Queries) EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error) {
-	row := q.db.QueryRow(ctx, equivalencyTableTimeseriesGetIsValid, id)
-	var not_exists bool
-	err := row.Scan(&not_exists)
-	return not_exists, err
-}
-
-const equivalencyTableUpdate = `-- name: EquivalencyTableUpdate :exec
-update datalogger_equivalency_table set
-	field_name = $2,
-	display_name = $3,
-	instrument_id = $4,
-	timeseries_id = $5
-where id = $1
-`
-
-type EquivalencyTableUpdateParams struct {
-	ID           uuid.UUID  `json:"id"`
-	FieldName    string     `json:"field_name"`
-	DisplayName  *string    `json:"display_name"`
-	InstrumentID *uuid.UUID `json:"instrument_id"`
-	TimeseriesID *uuid.UUID `json:"timeseries_id"`
-}
-
-func (q *Queries) EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error {
-	_, err := q.db.Exec(ctx, equivalencyTableUpdate,
-		arg.ID,
-		arg.FieldName,
-		arg.DisplayName,
-		arg.InstrumentID,
-		arg.TimeseriesID,
-	)
-	return err
-}
diff --git a/api/internal/db/instrument_constant.sql_gen.go b/api/internal/db/instrument_constant.sql_gen.go
deleted file mode 100644
index 7cf37ec8..00000000
--- a/api/internal/db/instrument_constant.sql_gen.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Code generated by sqlc. DO NOT EDIT.
-// versions:
-//   sqlc v1.27.0
-// source: instrument_constant.sql
-
-package db
-
-import (
-	"context"
-
-	"github.com/google/uuid"
-)
-
-const instrumentConstantCreate = `-- name: InstrumentConstantCreate :exec
-insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2)
-`
-
-type InstrumentConstantCreateParams struct {
-	InstrumentID uuid.UUID `json:"instrument_id"`
-	TimeseriesID uuid.UUID `json:"timeseries_id"`
-}
-
-func (q *Queries) InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error {
-	_, err := q.db.Exec(ctx, instrumentConstantCreate, arg.InstrumentID, arg.TimeseriesID)
-	return err
-}
-
-const instrumentConstantDelete = `-- name: InstrumentConstantDelete :exec
-delete from instrument_constants where instrument_id = $1 and timeseries_id = $2
-`
-
-type InstrumentConstantDeleteParams struct {
-	InstrumentID uuid.UUID `json:"instrument_id"`
-	TimeseriesID uuid.UUID `json:"timeseries_id"`
-}
-
-func (q *Queries) InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error {
-	_, err := q.db.Exec(ctx, instrumentConstantDelete, arg.InstrumentID, arg.TimeseriesID)
-	return err
-}
-
-const instrumentConstantList = `-- name: InstrumentConstantList :many
-select t.id, t.slug, t.name, t.type, t.is_computed, t.variable, t.instrument_id, t.instrument_slug, t.instrument, t.parameter_id, t.parameter, t.unit_id, t.unit from v_timeseries t
-inner join instrument_constants ic on ic.timeseries_id = t.id
-where ic.instrument_id = $1
-`
-
-func (q *Queries) InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error) {
-	rows, err := q.db.Query(ctx, instrumentConstantList, instrumentID)
-	if err != nil {
-		return nil, err
-	}
-	defer rows.Close()
-	items := []VTimeseries{}
-	for rows.Next() {
-		var i VTimeseries
-		if err := rows.Scan(
-			&i.ID,
-			&i.Slug,
-			&i.Name,
-			&i.Type,
-			&i.IsComputed,
-			&i.Variable,
-			&i.InstrumentID,
-			&i.InstrumentSlug,
-			&i.Instrument,
-			&i.ParameterID,
-			&i.Parameter,
-			&i.UnitID,
-			&i.Unit,
-		); err != nil {
-			return nil, err
-		}
-		items = append(items, i)
-	}
-	if err := rows.Err(); err != nil {
-		return nil, err
-	}
-	return items, nil
-}
diff --git a/api/internal/db/models.go b/api/internal/db/models.go
index 64d99304..5dbe61f6 100644
--- a/api/internal/db/models.go
+++ b/api/internal/db/models.go
@@ -238,6 +238,7 @@ const (
 	UploaderConfigTypeCsv  UploaderConfigType = "csv"
 	UploaderConfigTypeDux  UploaderConfigType = "dux"
 	UploaderConfigTypeToa5 UploaderConfigType = "toa5"
+	UploaderConfigTypeXlsx UploaderConfigType = "xlsx"
 )
 
 func (e *UploaderConfigType) Scan(src interface{}) error {
@@ -566,11 +567,6 @@ type Instrument struct {
 	ShowCwmsTab bool `json:"show_cwms_tab"`
 }
 
-type InstrumentConstants struct {
-	TimeseriesID uuid.UUID `json:"timeseries_id"`
-	InstrumentID uuid.UUID `json:"instrument_id"`
-}
-
 type InstrumentGroup struct {
 	ID      uuid.UUID `json:"id"`
 	Deleted bool      `json:"deleted"`
@@ -849,6 +845,38 @@ type SubmittalStatus struct {
 	Name string `json:"name"`
 }
 
+type Survey123 struct {
+	ID        uuid.UUID  `json:"id"`
+	ProjectID uuid.UUID  `json:"project_id"`
+	Name      string     `json:"name"`
+	Slug      string     `json:"slug"`
+	CreatedAt time.Time  `json:"created_at"`
+	UpdatedAt *time.Time `json:"updated_at"`
+	CreatedBy uuid.UUID  `json:"created_by"`
+	UpdatedBy *uuid.UUID `json:"updated_by"`
+	Deleted   bool       `json:"deleted"`
+}
+
+type Survey123EquivalencyTable struct {
+	Survey123ID      uuid.UUID  `json:"survey123_id"`
+	Survey123Deleted bool       `json:"survey123_deleted"`
+	FieldName        string     `json:"field_name"`
+	DisplayName      *string    `json:"display_name"`
+	InstrumentID     *uuid.UUID `json:"instrument_id"`
+	TimeseriesID     *uuid.UUID `json:"timeseries_id"`
+}
+
+type Survey123PayloadError struct {
+	Survey123ID  uuid.UUID `json:"survey123_id"`
+	ErrorMessage *string   `json:"error_message"`
+}
+
+type Survey123Preview struct {
+	Survey123ID uuid.UUID `json:"survey123_id"`
+	Preview     string    `json:"preview"`
+	UpdatedAt   time.Time `json:"updated_at"`
+}
+
 type TelemetryGoes struct {
 	ID       uuid.UUID `json:"id"`
 	NesdisID string    `json:"nesdis_id"`
@@ -932,12 +960,17 @@ type UploaderConfig struct {
 	ColumnOffset           int32      `json:"column_offset"`
 	RowOffset              int32      `json:"row_offset"`
 	DepthBasedInstrumentID *uuid.UUID `json:"depth_based_instrument_id"`
+	InstrumentFieldEnabled bool       `json:"instrument_field_enabled"`
+	InstrumentField        *string    `json:"instrument_field"`
+	XlsxSheetName          *string    `json:"xlsx_sheet_name"`
+	PreferDayFirst         bool       `json:"prefer_day_first"`
 }
 
 type UploaderConfigMapping struct {
-	UploaderConfigID uuid.UUID  `json:"uploader_config_id"`
-	FieldName        string     `json:"field_name"`
-	TimeseriesID     *uuid.UUID `json:"timeseries_id"`
+	UploaderConfigID    uuid.UUID  `json:"uploader_config_id"`
+	FieldName           string     `json:"field_name"`
+	TimeseriesID        *uuid.UUID `json:"timeseries_id"`
+	InstrumentFieldName *string    `json:"instrument_field_name"`
 }
 
 type VAlert struct {
@@ -1322,6 +1355,21 @@ type VSubmittal struct {
 	WarningSent bool `json:"warning_sent"`
 }
 
+type VSurvey123 struct {
+	ID                uuid.UUID                        `json:"id"`
+	ProjectID         uuid.UUID                        `json:"project_id"`
+	Name              string                           `json:"name"`
+	Slug              string                           `json:"slug"`
+	CreatedAt         time.Time                        `json:"created_at"`
+	UpdatedAt         *time.Time                       `json:"updated_at"`
+	CreatedBy         uuid.UUID                        `json:"created_by"`
+	CreatedByUsername *string                          `json:"created_by_username"`
+	UpdatedBy         *uuid.UUID                       `json:"updated_by"`
+	UpdatedByUsername *string                          `json:"updated_by_username"`
+	Fields            []Survey123EquivalencyTableField `json:"fields"`
+	Errors            interface{}                      `json:"errors"`
+}
+
 type VTimeseries struct {
 	ID   uuid.UUID `json:"id"`
 	Slug string    `json:"slug"`
@@ -1434,4 +1482,8 @@ type VUploaderConfig struct {
 	ColumnOffset           int32      `json:"column_offset"`
 	RowOffset              int32      `json:"row_offset"`
 	DepthBasedInstrumentID *uuid.UUID `json:"depth_based_instrument_id"`
+	InstrumentFieldEnabled bool       `json:"instrument_field_enabled"`
+	InstrumentField        *string    `json:"instrument_field"`
+	XlsxSheetName          *string    `json:"xlsx_sheet_name"`
+	PreferDayFirst         bool       `json:"prefer_day_first"`
 }
diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go
index fde5d3d9..0352a429 100644
--- a/api/internal/db/overrides.go
+++ b/api/internal/db/overrides.go
@@ -41,16 +41,16 @@ type DataloggerTableIDName struct {
 }
 
 type DomainGroupOpt struct {
-	ID          uuid.UUID `json:"id" db:"id"`
-	Value       string    `json:"value" db:"value"`
-	Description *string   `json:"description" db:"description"`
+	ID          uuid.UUID `json:"id"`
+	Value       string    `json:"value"`
+	Description *string   `json:"description"`
 }
 
 type CollectionGroupDetailsTimeseries struct {
 	VTimeseries
-	LatestTime  *time.Time `json:"latest_time" db:"latest_time"`
-	LatestValue *float32   `json:"latest_value" db:"latest_value"`
-	SortOrder   int32      `json:"sort_order" db:"sort_order"`
+	LatestTime  *time.Time `json:"latest_time"`
+	LatestValue *float32   `json:"latest_value"`
+	SortOrder   int32      `json:"sort_order"`
 }
 
 type EmailAutocompleteResult struct {
@@ -71,6 +71,10 @@ type InstrumentIDName struct {
 	InstrumentName string `json:"instrument_name"`
 }
 
+type Name struct {
+	Name string `json:"name"`
+}
+
 type IpiMeasurement struct {
 	SegmentID int      `json:"segment_id"`
 	Tilt      *float64 `json:"tilt"`
@@ -118,6 +122,13 @@ type SaaMeasurement struct {
 	Elevation *float64 `json:"elevation"`
 }
 
+type Survey123EquivalencyTableField struct {
+	FieldName    string     `json:"field_name"`
+	DisplayName  string     `json:"display_name"`
+	InstrumentID *uuid.UUID `json:"instrument_id"`
+	TimeseriesID *uuid.UUID `json:"timeseries_id"`
+}
+
 type TextOption struct {
 	Enabled bool   `json:"enabled"`
 	Value   string `json:"value"`
diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go
index fea94f1e..515292c9 100644
--- a/api/internal/db/querier.go
+++ b/api/internal/db/querier.go
@@ -89,12 +89,11 @@ type Querier interface {
 	EmailAutocompleteList(ctx context.Context, arg EmailAutocompleteListParams) ([]EmailAutocompleteListRow, error)
 	EmailDelete(ctx context.Context, id uuid.UUID) error
 	EmailGetOrCreate(ctx context.Context, email string) (uuid.UUID, error)
-	EquivalencyTableCreateOrUpdate(ctx context.Context, arg EquivalencyTableCreateOrUpdateParams) error
+	EquivalencyTableCreateOrUpdateBatch(ctx context.Context, arg []EquivalencyTableCreateOrUpdateBatchParams) *EquivalencyTableCreateOrUpdateBatchBatchResults
 	EquivalencyTableDelete(ctx context.Context, id uuid.UUID) error
 	EquivalencyTableDeleteForDataloggerTable(ctx context.Context, dataloggerTableID *uuid.UUID) error
 	EquivalencyTableGet(ctx context.Context, dataloggerTableID uuid.UUID) (VDataloggerEquivalencyTable, error)
-	EquivalencyTableTimeseriesGetIsValid(ctx context.Context, id uuid.UUID) (bool, error)
-	EquivalencyTableUpdate(ctx context.Context, arg EquivalencyTableUpdateParams) error
+	EquivalencyTableUpdateBatch(ctx context.Context, arg []EquivalencyTableUpdateBatchParams) *EquivalencyTableUpdateBatchBatchResults
 	EvaluationCreate(ctx context.Context, arg EvaluationCreateParams) (uuid.UUID, error)
 	EvaluationDelete(ctx context.Context, id uuid.UUID) error
 	EvaluationGet(ctx context.Context, id uuid.UUID) (VEvaluation, error)
@@ -119,10 +118,6 @@ type Querier interface {
 	InclSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VInclSegment, error)
 	InclSegmentUpdate(ctx context.Context, arg InclSegmentUpdateParams) error
 	InclSegmentUpdateBatch(ctx context.Context, arg []InclSegmentUpdateBatchParams) *InclSegmentUpdateBatchBatchResults
-	InstrumentConstantCreate(ctx context.Context, arg InstrumentConstantCreateParams) error
-	InstrumentConstantCreateBatch(ctx context.Context, arg []InstrumentConstantCreateBatchParams) *InstrumentConstantCreateBatchBatchResults
-	InstrumentConstantDelete(ctx context.Context, arg InstrumentConstantDeleteParams) error
-	InstrumentConstantList(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error)
 	InstrumentCreate(ctx context.Context, arg InstrumentCreateParams) (InstrumentCreateRow, error)
 	InstrumentCreateBatch(ctx context.Context, arg []InstrumentCreateBatchParams) *InstrumentCreateBatchBatchResults
 	InstrumentDeleteFlag(ctx context.Context, arg InstrumentDeleteFlagParams) error
@@ -163,7 +158,6 @@ type Querier interface {
 	IpiSegmentListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VIpiSegment, error)
 	IpiSegmentUpdate(ctx context.Context, arg IpiSegmentUpdateParams) error
 	IpiSegmentUpdateBatch(ctx context.Context, arg []IpiSegmentUpdateBatchParams) *IpiSegmentUpdateBatchBatchResults
-	PgTimezoneNamesList(ctx context.Context) ([]PgTimezoneNamesListRow, error)
 	PlotBullseyeConfigCreate(ctx context.Context, arg PlotBullseyeConfigCreateParams) error
 	PlotBullseyeConfigDelete(ctx context.Context, plotConfigID uuid.UUID) error
 	PlotBullseyeConfigUpdate(ctx context.Context, arg PlotBullseyeConfigUpdateParams) error
@@ -273,6 +267,17 @@ type Querier interface {
 	SubmittalUpdateNextForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) (uuid.UUID, error)
 	SubmittalUpdateVerifyMissing(ctx context.Context, id uuid.UUID) error
 	SubmittalUpdateVerifyMissingForAlertConfig(ctx context.Context, alertConfigID *uuid.UUID) error
+	Survey123Create(ctx context.Context, arg Survey123CreateParams) (uuid.UUID, error)
+	Survey123EquivalencyTableRowCreateOrUpdateBatch(ctx context.Context, arg []Survey123EquivalencyTableRowCreateOrUpdateBatchParams) *Survey123EquivalencyTableRowCreateOrUpdateBatchBatchResults
+	Survey123EquivalencyTableRowDeleteForSurvey123(ctx context.Context, survey123ID uuid.UUID) error
+	Survey123EquivalencyTableRowList(ctx context.Context, survey123ID uuid.UUID) ([]Survey123EquivalencyTable, error)
+	Survey123ListForProject(ctx context.Context, projectID uuid.UUID) ([]VSurvey123, error)
+	Survey123PayloadErrorCreateBatch(ctx context.Context, arg []Survey123PayloadErrorCreateBatchParams) *Survey123PayloadErrorCreateBatchBatchResults
+	Survey123PayloadErrorDeleteForSurvey123(ctx context.Context, survey123ID uuid.UUID) error
+	Survey123PreviewCreateOrUpdate(ctx context.Context, arg Survey123PreviewCreateOrUpdateParams) error
+	Survey123PreviewGet(ctx context.Context, survey123ID uuid.UUID) (Survey123Preview, error)
+	Survey123SoftDelete(ctx context.Context, id uuid.UUID) error
+	Survey123Update(ctx context.Context, arg Survey123UpdateParams) error
 	// the below queries are needed because the slug is currently used as the variable name, it would
 	// be better if we used a generated column for this on the timeseries table, maybe converted to snake_case
 	TimeseriesComputedCreate(ctx context.Context, arg TimeseriesComputedCreateParams) (uuid.UUID, error)
@@ -291,14 +296,15 @@ type Querier interface {
 	TimeseriesGet(ctx context.Context, id uuid.UUID) (VTimeseries, error)
 	TimeseriesGetAllBelongToProject(ctx context.Context, arg TimeseriesGetAllBelongToProjectParams) (bool, error)
 	TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) (bool, error)
+	TimeseriesGetIsStandard(ctx context.Context, timeseriesIds []uuid.UUID) (bool, error)
 	TimeseriesListForInstrument(ctx context.Context, instrumentID uuid.UUID) ([]VTimeseries, error)
 	TimeseriesListForInstrumentGroup(ctx context.Context, instrumentGroupID uuid.UUID) ([]VTimeseries, error)
 	TimeseriesListForPlotConfig(ctx context.Context, plotConfigurationID *uuid.UUID) ([]VTimeseries, error)
 	TimeseriesListForProject(ctx context.Context, projectID uuid.UUID) ([]VTimeseries, error)
+	TimeseriesListForTypeInstrument(ctx context.Context, arg TimeseriesListForTypeInstrumentParams) ([]VTimeseries, error)
 	TimeseriesMeasurementCreate(ctx context.Context, arg TimeseriesMeasurementCreateParams) error
 	TimeseriesMeasurementCreateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateBatchParams) *TimeseriesMeasurementCreateBatchBatchResults
 	TimeseriesMeasurementCreateOrUpdate(ctx context.Context, arg TimeseriesMeasurementCreateOrUpdateParams) error
-	TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams) *TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchBatchResults
 	TimeseriesMeasurementCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesMeasurementCreateOrUpdateBatchParams) *TimeseriesMeasurementCreateOrUpdateBatchBatchResults
 	TimeseriesMeasurementDelete(ctx context.Context, arg TimeseriesMeasurementDeleteParams) error
 	TimeseriesMeasurementDeleteBatch(ctx context.Context, arg []TimeseriesMeasurementDeleteBatchParams) *TimeseriesMeasurementDeleteBatchBatchResults
@@ -309,7 +315,6 @@ type Querier interface {
 	TimeseriesNoteCreate(ctx context.Context, arg TimeseriesNoteCreateParams) error
 	TimeseriesNoteCreateBatch(ctx context.Context, arg []TimeseriesNoteCreateBatchParams) *TimeseriesNoteCreateBatchBatchResults
 	TimeseriesNoteCreateOrUpdate(ctx context.Context, arg TimeseriesNoteCreateOrUpdateParams) error
-	TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams) *TimeseriesNoteCreateOrUpdateAtTimezoneBatchBatchResults
 	TimeseriesNoteCreateOrUpdateBatch(ctx context.Context, arg []TimeseriesNoteCreateOrUpdateBatchParams) *TimeseriesNoteCreateOrUpdateBatchBatchResults
 	TimeseriesNoteDelete(ctx context.Context, arg TimeseriesNoteDeleteParams) error
 	TimeseriesNoteDeleteBatch(ctx context.Context, arg []TimeseriesNoteDeleteBatchParams) *TimeseriesNoteDeleteBatchBatchResults
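The Querier gains a Survey123 family alongside the batch upserts. As a usage illustration, a webhook handler might persist per-payload errors and refresh the stored preview through this interface; this is a hedged sketch only — the function name, raw-JSON argument, and keep-first-error policy are assumptions, with context, time, uuid, and the generated db package imported:

// Hedged sketch of a Survey123 webhook handler's persistence step.
func recordSurvey123Payload(ctx context.Context, q db.Querier, id uuid.UUID, rawPreview string, msgs []string) error {
	params := make([]db.Survey123PayloadErrorCreateBatchParams, len(msgs))
	for i := range msgs {
		params[i] = db.Survey123PayloadErrorCreateBatchParams{Survey123ID: id, ErrorMessage: &msgs[i]}
	}
	var firstErr error
	q.Survey123PayloadErrorCreateBatch(ctx, params).Exec(func(i int, err error) {
		if err != nil && firstErr == nil {
			firstErr = err
		}
	})
	if firstErr != nil {
		return firstErr
	}
	// Upsert the cached preview shown in the UI for this survey.
	return q.Survey123PreviewCreateOrUpdate(ctx, db.Survey123PreviewCreateOrUpdateParams{
		Survey123ID: id,
		Preview:     rawPreview,
		UpdatedAt:   time.Now(),
	})
}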
+// versions: +// sqlc v1.27.0 +// source: survey123.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const survey123Create = `-- name: Survey123Create :one +insert into survey123 (project_id, name, slug, created_by) values ($1, $2, slugify($2, 'survey123'), $3) returning id +` + +type Survey123CreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + CreatedBy uuid.UUID `json:"created_by"` +} + +func (q *Queries) Survey123Create(ctx context.Context, arg Survey123CreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, survey123Create, arg.ProjectID, arg.Name, arg.CreatedBy) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const survey123EquivalencyTableRowDeleteForSurvey123 = `-- name: Survey123EquivalencyTableRowDeleteForSurvey123 :exec +delete from survey123_equivalency_table where survey123_id = $1 +` + +func (q *Queries) Survey123EquivalencyTableRowDeleteForSurvey123(ctx context.Context, survey123ID uuid.UUID) error { + _, err := q.db.Exec(ctx, survey123EquivalencyTableRowDeleteForSurvey123, survey123ID) + return err +} + +const survey123EquivalencyTableRowList = `-- name: Survey123EquivalencyTableRowList :many +select survey123_id, survey123_deleted, field_name, display_name, instrument_id, timeseries_id from survey123_equivalency_table where survey123_id = $1 +` + +func (q *Queries) Survey123EquivalencyTableRowList(ctx context.Context, survey123ID uuid.UUID) ([]Survey123EquivalencyTable, error) { + rows, err := q.db.Query(ctx, survey123EquivalencyTableRowList, survey123ID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Survey123EquivalencyTable{} + for rows.Next() { + var i Survey123EquivalencyTable + if err := rows.Scan( + &i.Survey123ID, + &i.Survey123Deleted, + &i.FieldName, + &i.DisplayName, + &i.InstrumentID, + &i.TimeseriesID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const survey123ListForProject = `-- name: Survey123ListForProject :many +select id, project_id, name, slug, created_at, updated_at, created_by, created_by_username, updated_by, updated_by_username, fields, errors from v_survey123 where project_id = $1 +` + +func (q *Queries) Survey123ListForProject(ctx context.Context, projectID uuid.UUID) ([]VSurvey123, error) { + rows, err := q.db.Query(ctx, survey123ListForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VSurvey123{} + for rows.Next() { + var i VSurvey123 + if err := rows.Scan( + &i.ID, + &i.ProjectID, + &i.Name, + &i.Slug, + &i.CreatedAt, + &i.UpdatedAt, + &i.CreatedBy, + &i.CreatedByUsername, + &i.UpdatedBy, + &i.UpdatedByUsername, + &i.Fields, + &i.Errors, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const survey123PayloadErrorDeleteForSurvey123 = `-- name: Survey123PayloadErrorDeleteForSurvey123 :exec +delete from survey123_payload_error where survey123_id = $1 +` + +func (q *Queries) Survey123PayloadErrorDeleteForSurvey123(ctx context.Context, survey123ID uuid.UUID) error { + _, err := q.db.Exec(ctx, survey123PayloadErrorDeleteForSurvey123, survey123ID) + return err +} + +const survey123PreviewCreateOrUpdate = `-- name: Survey123PreviewCreateOrUpdate :exec +insert into survey123_preview (survey123_id, preview, updated_at) values ($1,$2,$3) +on conflict on constraint 
survey123_id_key do update set preview=excluded.preview, updated_at=excluded.updated_at +` + +type Survey123PreviewCreateOrUpdateParams struct { + Survey123ID uuid.UUID `json:"survey123_id"` + Preview string `json:"preview"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (q *Queries) Survey123PreviewCreateOrUpdate(ctx context.Context, arg Survey123PreviewCreateOrUpdateParams) error { + _, err := q.db.Exec(ctx, survey123PreviewCreateOrUpdate, arg.Survey123ID, arg.Preview, arg.UpdatedAt) + return err +} + +const survey123PreviewGet = `-- name: Survey123PreviewGet :one +select p.survey123_id, p.preview, p.updated_at +from survey123_preview p +inner join survey123 s on p.survey123_id = s.id +where p.survey123_id = $1 +and not s.deleted +` + +func (q *Queries) Survey123PreviewGet(ctx context.Context, survey123ID uuid.UUID) (Survey123Preview, error) { + row := q.db.QueryRow(ctx, survey123PreviewGet, survey123ID) + var i Survey123Preview + err := row.Scan(&i.Survey123ID, &i.Preview, &i.UpdatedAt) + return i, err +} + +const survey123SoftDelete = `-- name: Survey123SoftDelete :exec +update survey123 set deleted = true where id = $1 +` + +func (q *Queries) Survey123SoftDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, survey123SoftDelete, id) + return err +} + +const survey123Update = `-- name: Survey123Update :exec +update survey123 set name=$2, updated_by=$3, updated_at=$4 where id=$1 +` + +type Survey123UpdateParams struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + UpdatedBy *uuid.UUID `json:"updated_by"` + UpdatedAt *time.Time `json:"updated_at"` +} + +func (q *Queries) Survey123Update(ctx context.Context, arg Survey123UpdateParams) error { + _, err := q.db.Exec(ctx, survey123Update, + arg.ID, + arg.Name, + arg.UpdatedBy, + arg.UpdatedAt, + ) + return err +} diff --git a/api/internal/db/timeseries.sql_gen.go b/api/internal/db/timeseries.sql_gen.go index a876f1bb..999d17b5 100644 --- a/api/internal/db/timeseries.sql_gen.go +++ b/api/internal/db/timeseries.sql_gen.go @@ -126,6 +126,21 @@ func (q *Queries) TimeseriesGetExistsStored(ctx context.Context, id uuid.UUID) ( return exists, err } +const timeseriesGetIsStandard = `-- name: TimeseriesGetIsStandard :one +select exists ( + select true from timeseries + where type = 'standard' + and id = any($1::uuid[]) +) +` + +func (q *Queries) TimeseriesGetIsStandard(ctx context.Context, timeseriesIds []uuid.UUID) (bool, error) { + row := q.db.QueryRow(ctx, timeseriesGetIsStandard, timeseriesIds) + var exists bool + err := row.Scan(&exists) + return exists, err +} + const timeseriesListForInstrument = `-- name: TimeseriesListForInstrument :many select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries where instrument_id = $1 @@ -285,6 +300,51 @@ func (q *Queries) TimeseriesListForProject(ctx context.Context, projectID uuid.U return items, nil } +const timeseriesListForTypeInstrument = `-- name: TimeseriesListForTypeInstrument :many +select id, slug, name, type, is_computed, variable, instrument_id, instrument_slug, instrument, parameter_id, parameter, unit_id, unit from v_timeseries +where instrument_id = $1 +and type = $2 +` + +type TimeseriesListForTypeInstrumentParams struct { + InstrumentID uuid.UUID `json:"instrument_id"` + Type TimeseriesType `json:"type"` +} + +func (q *Queries) TimeseriesListForTypeInstrument(ctx context.Context, arg TimeseriesListForTypeInstrumentParams) ([]VTimeseries, error) { + rows, 
err := q.db.Query(ctx, timeseriesListForTypeInstrument, arg.InstrumentID, arg.Type) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VTimeseries{} + for rows.Next() { + var i VTimeseries + if err := rows.Scan( + &i.ID, + &i.Slug, + &i.Name, + &i.Type, + &i.IsComputed, + &i.Variable, + &i.InstrumentID, + &i.InstrumentSlug, + &i.Instrument, + &i.ParameterID, + &i.Parameter, + &i.UnitID, + &i.Unit, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const timeseriesUpdate = `-- name: TimeseriesUpdate :exec update timeseries set name=$2, instrument_id=$3, parameter_id=$4, unit_id=$5 where id = $1 diff --git a/api/internal/db/uploader.sql_gen.go b/api/internal/db/uploader.sql_gen.go index 4d78ff94..1d79086c 100644 --- a/api/internal/db/uploader.sql_gen.go +++ b/api/internal/db/uploader.sql_gen.go @@ -31,12 +31,17 @@ insert into uploader_config ( comment_field, column_offset, row_offset, - depth_based_instrument_id + depth_based_instrument_id, + instrument_field_enabled, + instrument_field, + xlsx_sheet_name, + prefer_day_first ) values ( $1, $2, - slugify($3, 'uploader_config'), + slugify($2, 'uploader_config'), + $3, $4, $5, $6, @@ -51,7 +56,10 @@ values ( $15, $16, $17, - $18 + $18, + $19, + $20, + $21 ) returning id ` @@ -59,7 +67,6 @@ returning id type UploaderConfigCreateParams struct { ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` - Slug string `json:"slug"` Description string `json:"description"` CreatedAt time.Time `json:"created_at"` CreatedBy uuid.UUID `json:"created_by"` @@ -75,13 +82,16 @@ type UploaderConfigCreateParams struct { ColumnOffset int32 `json:"column_offset"` RowOffset int32 `json:"row_offset"` DepthBasedInstrumentID *uuid.UUID `json:"depth_based_instrument_id"` + InstrumentFieldEnabled bool `json:"instrument_field_enabled"` + InstrumentField *string `json:"instrument_field"` + XlsxSheetName *string `json:"xlsx_sheet_name"` + PreferDayFirst bool `json:"prefer_day_first"` } func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCreateParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, uploaderConfigCreate, arg.ProjectID, arg.Name, - arg.Slug, arg.Description, arg.CreatedAt, arg.CreatedBy, @@ -97,6 +107,10 @@ func (q *Queries) UploaderConfigCreate(ctx context.Context, arg UploaderConfigCr arg.ColumnOffset, arg.RowOffset, arg.DepthBasedInstrumentID, + arg.InstrumentFieldEnabled, + arg.InstrumentField, + arg.XlsxSheetName, + arg.PreferDayFirst, ) var id uuid.UUID err := row.Scan(&id) @@ -113,7 +127,7 @@ func (q *Queries) UploaderConfigDelete(ctx context.Context, id uuid.UUID) error } const uploaderConfigGet = `-- name: UploaderConfigGet :one -select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset, depth_based_instrument_id from v_uploader_config where id=$1 +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset, depth_based_instrument_id, instrument_field_enabled, instrument_field, xlsx_sheet_name, 
prefer_day_first from v_uploader_config where id=$1 ` func (q *Queries) UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploaderConfig, error) { @@ -143,12 +157,16 @@ func (q *Queries) UploaderConfigGet(ctx context.Context, id uuid.UUID) (VUploade &i.ColumnOffset, &i.RowOffset, &i.DepthBasedInstrumentID, + &i.InstrumentFieldEnabled, + &i.InstrumentField, + &i.XlsxSheetName, + &i.PreferDayFirst, ) return i, err } const uploaderConfigListForProject = `-- name: UploaderConfigListForProject :many -select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset, depth_based_instrument_id from v_uploader_config where project_id=$1 +select id, project_id, slug, name, description, created_at, created_by, created_by_username, updated_by, updated_at, updated_by_username, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset, depth_based_instrument_id, instrument_field_enabled, instrument_field, xlsx_sheet_name, prefer_day_first from v_uploader_config where project_id=$1 ` func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uuid.UUID) ([]VUploaderConfig, error) { @@ -184,6 +202,10 @@ func (q *Queries) UploaderConfigListForProject(ctx context.Context, projectID uu &i.ColumnOffset, &i.RowOffset, &i.DepthBasedInstrumentID, + &i.InstrumentFieldEnabled, + &i.InstrumentField, + &i.XlsxSheetName, + &i.PreferDayFirst, ); err != nil { return nil, err } @@ -205,7 +227,7 @@ func (q *Queries) UploaderConfigMappingDeleteForUploaderConfig(ctx context.Conte } const uploaderConfigMappingList = `-- name: UploaderConfigMappingList :many -select uploader_config_id, field_name, timeseries_id from uploader_config_mapping where uploader_config_id=$1 +select uploader_config_id, field_name, timeseries_id, instrument_field_name from uploader_config_mapping where uploader_config_id=$1 ` func (q *Queries) UploaderConfigMappingList(ctx context.Context, uploaderConfigID uuid.UUID) ([]UploaderConfigMapping, error) { @@ -217,7 +239,12 @@ func (q *Queries) UploaderConfigMappingList(ctx context.Context, uploaderConfigI items := []UploaderConfigMapping{} for rows.Next() { var i UploaderConfigMapping - if err := rows.Scan(&i.UploaderConfigID, &i.FieldName, &i.TimeseriesID); err != nil { + if err := rows.Scan( + &i.UploaderConfigID, + &i.FieldName, + &i.TimeseriesID, + &i.InstrumentFieldName, + ); err != nil { return nil, err } items = append(items, i) @@ -245,7 +272,11 @@ update uploader_config set comment_field=$14, column_offset=$15, row_offset=$16, - depth_based_instrument_id=$17 + depth_based_instrument_id=$17, + instrument_field_enabled=$18, + instrument_field=$19, + xlsx_sheet_name=$20, + prefer_day_first=$21 where id=$1 ` @@ -267,6 +298,10 @@ type UploaderConfigUpdateParams struct { ColumnOffset int32 `json:"column_offset"` RowOffset int32 `json:"row_offset"` DepthBasedInstrumentID *uuid.UUID `json:"depth_based_instrument_id"` + InstrumentFieldEnabled bool `json:"instrument_field_enabled"` + InstrumentField *string `json:"instrument_field"` + XlsxSheetName *string `json:"xlsx_sheet_name"` + PreferDayFirst bool `json:"prefer_day_first"` } func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUpdateParams) error { @@ 
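Pulling the uploader_config changes in this hunk together: the slug is now derived server-side from `name` via `slugify($2, 'uploader_config')`, and an XLSX config carries its worksheet name plus the day-first preference. A fragment-level sketch of building the create params (values invented; fields such as `Type`, `TzName`, and `TimeField` sit in elided context above and are assumed to keep their sqlc-generated names):

```go
// Sketch only: create an XLSX uploader config using the new columns.
sheet := "Sheet1"
id, err := q.UploaderConfigCreate(ctx, db.UploaderConfigCreateParams{
	ProjectID:      projectID,
	Name:           "Piezometer weekly upload", // slug is slugified from this
	Description:    "weekly spreadsheet drop",
	CreatedAt:      time.Now(),
	CreatedBy:      profileID,
	Type:           db.UploaderConfigTypeXlsx,
	TzName:         "America/New_York",
	TimeField:      "timestamp",
	RowOffset:      1,
	XlsxSheetName:  &sheet,
	PreferDayFirst: true,
})
```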
-288,6 +323,10 @@ func (q *Queries) UploaderConfigUpdate(ctx context.Context, arg UploaderConfigUp arg.ColumnOffset, arg.RowOffset, arg.DepthBasedInstrumentID, + arg.InstrumentFieldEnabled, + arg.InstrumentField, + arg.XlsxSheetName, + arg.PreferDayFirst, ) return err } diff --git a/api/internal/dto/survey123.go b/api/internal/dto/survey123.go new file mode 100644 index 00000000..4e06ad24 --- /dev/null +++ b/api/internal/dto/survey123.go @@ -0,0 +1,46 @@ +package dto + +import ( + "time" + + "github.com/google/uuid" +) + +type Survey123 struct { + ID uuid.UUID `json:"id" db:"id"` + ProjectID uuid.UUID `json:"project_id" db:"project_id"` + Name string `json:"name" db:"name"` + Slug string `json:"slug" db:"slug"` + Rows []Survey123EquivalencyTableRow `json:"rows" db:"fields"` + Errors []string `json:"errors" db:"errors"` + AuditInfo +} + +type Survey123EquivalencyTableRow struct { + FieldName string `json:"field_name" db:"field_name"` + DisplayName string `json:"display_name" db:"display_name"` + InstrumentID *uuid.UUID `json:"instrument_id" db:"instrument_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` +} + +type Survey123Payload struct { + EventType string + Edits []Survey123Edits +} + +type Survey123Edits struct { + Adds []Survey123ApplyEdits `json:"adds,omitempty"` + Updates []Survey123ApplyEdits `json:"updates,omitempty"` +} + +type Survey123ApplyEdits struct { + Attributes map[string]interface{} `json:"attributes,omitempty"` + ObjectID interface{} `json:"objectId,omitempty"` + Geometry interface{} `json:"geometry,omitempty"` +} + +type Survey123Preview struct { + Survey123ID uuid.UUID `json:"survey123_id" db:"survey123_id"` + Preview string `json:"preview" db:"preview"` + UpdatedAt *time.Time `json:"updated_at" db:"updated_at"` +} diff --git a/api/internal/dto/uploader.go b/api/internal/dto/uploader.go index f54ceb00..a3cf60e7 100644 --- a/api/internal/dto/uploader.go +++ b/api/internal/dto/uploader.go @@ -7,31 +7,37 @@ import ( type UploaderConfigType string const ( - CSV, DUX, TOA5 UploaderConfigType = "csv", "dux", "toa5" + CSV, DUX, TOA5, XLSX UploaderConfigType = "csv", "dux", "toa5", "xlsx" ) type UploaderConfig struct { - ID uuid.UUID `json:"id"` - ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Slug string `json:"slug"` - Description string `json:"description"` - Type UploaderConfigType `json:"type"` - TzName string `json:"tz_name"` - TimeField string `json:"time_field"` - ValidatedFieldEnabled bool `json:"validated_field_enabled"` - ValidatedField *string `json:"validated_field"` - MaskedFieldEnabled bool `json:"masked_field_enabled"` - MaskedField *string `json:"masked_field"` - CommentFieldEnabled bool `json:"comment_field_enabled"` - CommentField *string `json:"comment_field"` - ColumnOffset int32 `json:"column_offset"` - RowOffset int32 `json:"row_offset"` + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + Name string `json:"name"` + Slug string `json:"slug"` + Description string `json:"description"` + Type UploaderConfigType `json:"type"` + TzName string `json:"tz_name"` + TimeField string `json:"time_field"` + ValidatedFieldEnabled bool `json:"validated_field_enabled"` + ValidatedField *string `json:"validated_field"` + MaskedFieldEnabled bool `json:"masked_field_enabled"` + MaskedField *string `json:"masked_field"` + CommentFieldEnabled bool `json:"comment_field_enabled"` + CommentField *string `json:"comment_field"` + ColumnOffset int32 `json:"column_offset"` + RowOffset int32 `json:"row_offset"` + DepthBasedInstrumentID *uuid.UUID `json:"depth_based_instrument_id"` + InstrumentFieldEnabled bool `json:"instrument_field_enabled"` + InstrumentField *string `json:"instrument_field"` + XlsxSheetName *string `json:"xlsx_sheet_name"` + PreferDayFirst bool `json:"prefer_day_first"` AuditInfo }
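The `prefer_day_first` flag presumably settles ambiguous numeric dates in uploaded files, such as "02/03/2025". One common choice for this in Go is `github.com/araddon/dateparse`, whose `PreferMonthFirst` option maps directly onto such a flag; whether this codebase actually wires it that way is an assumption here, and the `parseCell` helper below is hypothetical:

```go
package main

import (
	"fmt"
	"time"

	"github.com/araddon/dateparse"
)

// parseCell is a hypothetical helper: it parses one spreadsheet timestamp
// in the uploader config's time zone, using preferDayFirst to resolve
// ambiguous numeric dates such as "02/03/2025".
func parseCell(raw string, loc *time.Location, preferDayFirst bool) (time.Time, error) {
	return dateparse.ParseIn(raw, loc, dateparse.PreferMonthFirst(!preferDayFirst))
}

func main() {
	loc, _ := time.LoadLocation("America/New_York")
	t, _ := parseCell("02/03/2025 10:30", loc, true)
	fmt.Println(t) // with day-first preference this parses as 2 March 2025
}
```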
type UploaderConfigMapping struct { - UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` - FieldName string `json:"field_name" db:"field_name"` - TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + UploaderConfigID uuid.UUID `json:"-" db:"uploader_config_id"` + FieldName string `json:"field_name" db:"field_name"` + TimeseriesID *uuid.UUID `json:"timeseries_id" db:"timeseries_id"` + InstrumentFieldName *string `json:"instrument_field_name" db:"instrument_field_name"` } diff --git a/api/internal/handler/domain.go b/api/internal/handler/domain.go index 0a9c18ab..4be0779f 100644 --- a/api/internal/handler/domain.go +++ b/api/internal/handler/domain.go @@ -50,15 +50,11 @@ func (h *ApiHandler) GetDomainGroups(c echo.Context) error { // @Summary lists time zone options // @Tags domain // @Produce json -// @Success 200 {array} db.PgTimezoneNamesListRow +// @Success 200 {array} string // @Failure 400 {object} echo.HTTPError // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /domains/timezones [get] func (h *ApiHandler) ListTimezoneOptions(c echo.Context) error { - dd, err := h.DBService.PgTimezoneNamesList(c.Request().Context()) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, dd) + return c.JSON(http.StatusOK, h.Config.AvailableTimezones) } diff --git a/api/internal/handler/domain_test.go b/api/internal/handler/domain_test.go index f3036752..83b0badb 100644 --- a/api/internal/handler/domain_test.go +++ b/api/internal/handler/domain_test.go @@ -37,7 +37,13 @@ func TestDomains(t *testing.T) { }, { Name: "GetDomainGroup", - URL: "/domains/group", + URL: "/domains/groups", Method: http.MethodGet, ExpectedStatus: http.StatusOK, }, + { + Name: "ListTimezoneOptions", + URL: "/domains/timezones", + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + }, diff --git a/api/internal/handler/instrument_constant.go b/api/internal/handler/instrument_constant.go deleted file mode 100644 index 481df1ab..00000000 --- a/api/internal/handler/instrument_constant.go +++ /dev/null @@ -1,106 +0,0 @@ -package handler - -import ( - "net/http" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" - "github.com/USACE/instrumentation-api/api/internal/httperr" - "github.com/google/uuid" - "github.com/labstack/echo/v4" -) - -// ListInstrumentConstants godoc -// -// @Summary lists constants for a given instrument -// @Tags instrument-constant -// @Produce json -// @Param project_id path string true "project uuid" Format(uuid) -// @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Success 200 {array} db.VTimeseries -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /projects/{project_id}/instruments/{instrument_id}/constants [get] -func (h *ApiHandler) ListInstrumentConstants(c echo.Context) error { - instID, err := uuid.Parse(c.Param("instrument_id")) - if err != nil { - return httperr.MalformedID(err) - } - cc, err := h.DBService.InstrumentConstantList(c.Request().Context(), instID) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, cc) -} - -// CreateInstrumentConstants godoc -// -//
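With `/domains/timezones` now serving the static `h.Config.AvailableTimezones` list instead of querying `pg_timezone_names`, a misspelled name in config would only surface later when a parse calls `time.LoadLocation`. A startup check along these lines could fail fast (hypothetical helper, not part of this diff):

```go
import (
	"fmt"
	"time"
)

// validateTimezones fails fast if any configured zone name cannot be
// resolved against the tz database available to the process.
func validateTimezones(names []string) error {
	for _, name := range names {
		if _, err := time.LoadLocation(name); err != nil {
			return fmt.Errorf("configured timezone %q: %w", name, err)
		}
	}
	return nil
}
```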
@Summary creates instrument constants (i.e. timeseries) -// @Tags instrument-constant -// @Produce json -// @Param project_id path string true "project uuid" Format(uuid) -// @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_collection_items body dto.TimeseriesCollectionItems true "timeseries collection items payload" -// @Param key query string false "api key" -// @Success 200 {array} db.TimeseriesCreateBatchRow -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /projects/{project_id}/instruments/{instrument_id}/constants [post] -// @Security Bearer -func (h *ApiHandler) CreateInstrumentConstants(c echo.Context) error { - ctx := c.Request().Context() - var tc dto.TimeseriesCollectionItems - if err := c.Bind(&tc); err != nil { - return httperr.MalformedBody(err) - } - instID, err := uuid.Parse(c.Param("instrument_id")) - if err != nil { - return httperr.MalformedID(err) - } - for idx := range tc.Items { - if instID != tc.Items[idx].InstrumentID { - return httperr.Message(http.StatusBadRequest, "all instrument ids in body must match query parameter") - } - } - tt, err := h.DBService.InstrumentConstantCreateBatch(ctx, tc.Items) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusCreated, tt) -} - -// DeleteInstrumentConstant godoc -// -// @Summary removes a timeseries as an instrument constant -// @Tags instrument-constant -// @Produce json -// @Param project_id path string true "project uuid" Format(uuid) -// @Param instrument_id path string true "instrument uuid" Format(uuid) -// @Param timeseries_id path string true "timeseries uuid" Format(uuid) -// @Param key query string false "api key" -// @Success 200 {object} map[string]interface{} -// @Failure 400 {object} echo.HTTPError -// @Failure 404 {object} echo.HTTPError -// @Failure 500 {object} echo.HTTPError -// @Router /projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id} [delete] -// @Security Bearer -func (h *ApiHandler) DeleteInstrumentConstant(c echo.Context) error { - instID, err := uuid.Parse(c.Param("instrument_id")) - if err != nil { - return httperr.MalformedID(err) - } - tsID, err := uuid.Parse(c.Param("timeseries_id")) - if err != nil { - return httperr.MalformedID(err) - } - err = h.DBService.InstrumentConstantDelete(c.Request().Context(), db.InstrumentConstantDeleteParams{ - InstrumentID: instID, - TimeseriesID: tsID, - }) - if err != nil { - return httperr.InternalServerError(err) - } - return c.JSON(http.StatusOK, make(map[string]interface{})) -} diff --git a/api/internal/handler/instrument_constant_test.go b/api/internal/handler/instrument_constant_test.go deleted file mode 100644 index cc1ba5f8..00000000 --- a/api/internal/handler/instrument_constant_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package handler_test - -import ( - "fmt" - "net/http" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/xeipuuv/gojsonschema" -) - -const ( - testInstrumentConstantInstrumentID = "a7540f69-c41e-43b3-b655-6e44097edb7e" - testInstrumentConstantTimeseriesID1 = "22a734d6-dc24-451d-a462-43a32f335ae8" - testInstrumentConstantTimeseriesID2 = "14247bc8-b264-4857-836f-182d47ebb39d" -) - -const createInstrumentConstantBody = `{ - "name": "Test Instrument Constant", - "instrument_id": "a7540f69-c41e-43b3-b655-6e44097edb7e", - "parameter_id": "068b59b0-aafb-4c98-ae4b-ed0365a6fbac", - "unit_id": "f777f2e2-5e32-424e-a1ca-19d16cd8abce" -}` - -const 
updateInstrumentConstantBody = `{ - "id": "22a734d6-dc24-451d-a462-43a32f335ae8", - "name": "Tip Depth Updated Name", - "instrument_id": "a7540f69-c41e-43b3-b655-6e44097edb7e", - "parameter_id": "068b59b0-aafb-4c98-ae4b-ed0365a6fbac", - "unit_id": "f777f2e2-5e32-424e-a1ca-19d16cd8abce" -}` - -func TestInstrumentConstants(t *testing.T) { - objSchema, err := gojsonschema.NewSchema(timeseriesObjectLoader) - assert.Nil(t, err) - arrSchema, err := gojsonschema.NewSchema(timeseriesArrayLoader) - assert.Nil(t, err) - - tests := []HTTPTest{ - { - Name: "ListInstrumentConstants", - URL: fmt.Sprintf("/projects/%s/instruments/%s/constants", testProjectID, testInstrumentConstantInstrumentID), - Method: http.MethodGet, - ExpectedStatus: http.StatusOK, - ExpectedSchema: arrSchema, - }, - { - Name: "CreateInstrumentConstant", - URL: fmt.Sprintf("/projects/%s/instruments/%s/constants", testProjectID, testInstrumentConstantInstrumentID), - Method: http.MethodPost, - Body: createInstrumentConstantBody, - ExpectedStatus: http.StatusCreated, - ExpectedSchema: arrSchema, - }, - { - Name: "UpdateInstrumentConstant", - URL: fmt.Sprintf("/projects/%s/instruments/%s/constants/%s", testProjectID, testInstrumentConstantInstrumentID, testInstrumentConstantTimeseriesID1), - Method: http.MethodPut, - Body: updateInstrumentConstantBody, - ExpectedStatus: http.StatusOK, - ExpectedSchema: objSchema}, - { - Name: "DeleteInstrumentConstant", - URL: fmt.Sprintf("/projects/%s/instruments/%s/constants/%s", testProjectID, testInstrumentConstantInstrumentID, testInstrumentConstantTimeseriesID2), - Method: http.MethodDelete, - ExpectedStatus: http.StatusOK, - }} - - RunAll(t, tests) -} diff --git a/api/internal/handler/instrument_incl_test.go b/api/internal/handler/instrument_incl_test.go new file mode 100644 index 00000000..0eced311 --- /dev/null +++ b/api/internal/handler/instrument_incl_test.go @@ -0,0 +1,146 @@ +package handler_test + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/xeipuuv/gojsonschema" +) + +const inclSegmentArraySchema = `{ + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "number" }, + "instrument_id": { "type": "string" }, + "depth_timeseries_id": { "type": [ "string", "null" ] }, + "a0_timeseries_id": { "type": [ "string", "null" ] }, + "a180_timeseries_id": { "type": [ "string", "null" ] }, + "b0_timeseries_id": { "type": [ "string", "null" ] }, + "b180_timeseries_id": { "type": [ "string", "null" ] } + }, + "additionalProperties": false + } +}` + +var inclSegmentArrayLoader = gojsonschema.NewStringLoader(inclSegmentArraySchema) + +const inclMeasurementsArraySchema = `{ + "type": "array", + "items": { + "type": "object", + "properties": { + "segment_id": { "type": "number" }, + "instrument_id": { "type": "string" }, + "depth": { "type": ["number", "null"] }, + "a0": { "type": ["number", "null"] }, + "a180": { "type": ["number", "null"] }, + "b0": { "type": ["number", "null"] }, + "b180": { "type": ["number", "null"] }, + "a_increment": { "type": ["number", "null"] }, + "a_cum_dev": { "type": ["number", "null"] }, + "a_checksum": { "type": ["number", "null"] }, + "a_comb": { "type": ["number", "null"] }, + "b_increment": { "type": ["number", "null"] }, + "b_cum_dev": { "type": ["number", "null"] }, + "b_checksum": { "type": ["number", "null"] }, + "b_comb": { "type": ["number", "null"] } + }, + "additionalProperties": false + } +}` + +var inclMeasurementsArrayLoader = gojsonschema.NewStringLoader(inclMeasurementsArraySchema) + +const ( + testInclInstrumentID =
"f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c" + testInclTimeAfter = "1900-01-01T00:00:00.00Z" + testInclTimeBefore = "2030-01-01T00:00:00.00Z" +) + +const updateInclSegmentsBody = `[ + { + "id": 1, + "instrument_id": "f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c", + "depth_timeseries_id": "6d623d88-b6e8-4f0d-86be-d4445b6b6789", + "a0_timeseries_id": "40dfcce3-7f36-475e-969c-2b0b8633c856", + "a180_timeseries_id": "20792b7d-0f87-4f4b-81c6-616a8d76613a", + "a0_timeseries_id": "659d026e-3f47-4efe-899f-4129b5466228", + "a180_timeseries_id": "015a07f9-4005-4d2d-96dc-7f7d611ca51a" + }, + { + "id": 2, + "instrument_id": "f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c", + "depth_timeseries_id": "1e3a1d3c-38e3-4f34-b65f-d2b8287ed591", + "a0_timeseries_id": "229d8f8d-fd60-465e-94c4-c25bc79d4f7e", + "a180_timeseries_id": "74d19174-f911-4234-96e4-fae1a49969e6", + "a0_timeseries_id": "fd7c720e-7119-45dc-bf7a-44da303a9aa4", + "a180_timeseries_id": "e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1" + } +]` + +const createInclInstrumentBulkBody = `[{ + "status_id": "94578354-ffdf-4119-9663-6bd4323e58f5", + "status": "destroyed", + "status_time": "2001-01-01T00:00:00Z", + "slug": "test-incl-1", + "name": "Test INCL 1", + "type_id": "3c3dfc23-ed2a-4a4a-9ce0-683c7c1d4d20", + "type": "Inclinometer", + "geometry": { + "type": "Point", + "coordinates": [ + -80.8, + 26.7 + ] + }, + "formula": null, + "station": -12.1, + "offset": 12.5, + "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", + "opts": { + "num_segments": 3, + "bottom_elevation": 1000 + } +}]` + +func TestInclInstruments(t *testing.T) { + segArrSchema, err := gojsonschema.NewSchema(inclSegmentArrayLoader) + assert.Nil(t, err) + measurementsArrSchema, err := gojsonschema.NewSchema(inclMeasurementsArrayLoader) + assert.Nil(t, err) + + tests := []HTTPTest{ + { + Name: "ListInclSegmentsForInstrument", + URL: fmt.Sprintf("/instruments/incl/%s/segments", testInclInstrumentID), + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + ExpectedSchema: segArrSchema, + }, + { + Name: "GetInclMeasurementsForInstrument", + URL: fmt.Sprintf("/instruments/incl/%s/measurements?after=%s&before=%s", testInclInstrumentID, testInclTimeAfter, testInclTimeBefore), + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + ExpectedSchema: measurementsArrSchema, + }, + { + Name: "UpdateInclSegments", + URL: fmt.Sprintf("/instruments/incl/%s/segments", testInclInstrumentID), + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + ExpectedSchema: segArrSchema, + }, + { + Name: "CreateInclInstrumentBulk", + URL: fmt.Sprintf("/projects/%s/instruments", testProjectID), + Method: http.MethodPost, + Body: createInclInstrumentBulkBody, + ExpectedStatus: http.StatusCreated, + }} + + RunAll(t, tests) +} diff --git a/api/internal/handler/survey123.go b/api/internal/handler/survey123.go new file mode 100644 index 00000000..ba4198e9 --- /dev/null +++ b/api/internal/handler/survey123.go @@ -0,0 +1,175 @@ +package handler + +import ( + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +// ListSurvey123sForProject godoc +// +// @Summary lists Survey123 connections for a project +// @Tags survey123 +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Success 200 {array} dto.Survey123 +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// 
@Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/survey123 [get] +func (h *ApiHandler) ListSurvey123sForProject(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + + svv, err := h.DBService.Survey123ListForProject(c.Request().Context(), projectID) + if err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusOK, svv) +} + +// GetSurvey123Preview godoc +// +// @Summary gets the most recent Survey123 raw json payload sent from the webhook API +// @Tags survey123 +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param survey123_id path string true "survey123 uuid" Format(uuid) +// @Success 200 {object} string +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/survey123/{survey123_id}/previews [get] +func (h *ApiHandler) GetSurvey123Preview(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + survey123ID, err := uuid.Parse(c.Param("survey123_id")) + if err != nil { + return httperr.MalformedID(err) + } + + pv, err := h.DBService.Survey123PreviewGet(c.Request().Context(), survey123ID) + if err != nil { + return httperr.ServerErrorOrNotFound(err) + } + + return c.JSON(http.StatusOK, pv) +} + +// CreateSurvey123 godoc +// +// @Summary creates a Survey123 connection with equivalency table mappings +// @Tags survey123 +// @Accept json +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param survey123 body dto.Survey123 true "survey123 payload" +// @Success 200 {object} map[string]uuid.UUID +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/survey123 [post] +// @Security Bearer +func (h *ApiHandler) CreateSurvey123(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + p := c.Get("profile").(db.VProfile) + + var sv dto.Survey123 + if err := c.Bind(&sv); err != nil { + return httperr.MalformedBody(err) + } + sv.ProjectID = projectID + sv.CreatedBy = p.ID + + newID, err := h.DBService.Survey123Create(c.Request().Context(), sv) + if err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusCreated, map[string]uuid.UUID{"id": newID}) +} + +// UpdateSurvey123 godoc +// +// @Summary updates a Survey123 connection with equivalency table mappings +// @Tags survey123 +// @Accept json +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param survey123_id path string true "survey123 uuid" Format(uuid) +// @Param survey123 body dto.Survey123 true "survey123 payload" +// @Success 200 {object} map[string]uuid.UUID +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/survey123/{survey123_id} [put] +// @Security Bearer +func (h *ApiHandler) UpdateSurvey123(c echo.Context) error { + projectID, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + survey123ID, err := uuid.Parse(c.Param("survey123_id")) + if err != nil { + return httperr.MalformedID(err) + } + var sv dto.Survey123 + if err := c.Bind(&sv); err != nil { + return
httperr.MalformedBody(err) + } + sv.ProjectID = projectID + sv.ID = survey123ID + + p := c.Get("profile").(db.VProfile) + t := time.Now() + sv.UpdatedBy, sv.UpdatedAt = &p.ID, &t + + if err := h.DBService.Survey123Update(c.Request().Context(), sv); err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusOK, map[string]uuid.UUID{"id": survey123ID}) +} + +// DeleteSurvey123 godoc +// +// @Summary deletes a Survey123 connection with equivalency table mappings +// @Tags survey123 +// @Produce json +// @Param project_id path string true "project uuid" Format(uuid) +// @Param survey123_id path string true "survey123 uuid" Format(uuid) +// @Success 200 {object} map[string]uuid.UUID +// @Failure 400 {object} echo.HTTPError +// @Failure 404 {object} echo.HTTPError +// @Failure 500 {object} echo.HTTPError +// @Router /projects/{project_id}/survey123/{survey123_id} [delete] +// @Security Bearer +func (h *ApiHandler) DeleteSurvey123(c echo.Context) error { + _, err := uuid.Parse(c.Param("project_id")) + if err != nil { + return httperr.MalformedID(err) + } + survey123ID, err := uuid.Parse(c.Param("survey123_id")) + if err != nil { + return httperr.MalformedID(err) + } + + if err := h.DBService.Survey123SoftDelete(c.Request().Context(), survey123ID); err != nil { + return httperr.InternalServerError(err) + } + + return c.JSON(http.StatusOK, map[string]uuid.UUID{"id": survey123ID}) +} diff --git a/api/internal/handler/survey123_telemetry.go b/api/internal/handler/survey123_telemetry.go new file mode 100644 index 00000000..1c52287e --- /dev/null +++ b/api/internal/handler/survey123_telemetry.go @@ -0,0 +1,62 @@ +package handler + +import ( + "encoding/json" + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/USACE/instrumentation-api/api/internal/httperr" + "github.com/google/uuid" + "github.com/labstack/echo/v4" +) + +func (h *TelemetryHandler) CreateOrUpdateSurvey123Measurements(c echo.Context) error { + survey123ID, err := uuid.Parse(c.Param("survey123_id")) + if err != nil { + return httperr.MalformedID(err) + } + + var raw map[string]json.RawMessage + if err := c.Bind(&raw); err != nil { + return httperr.MalformedBody(err) + } + + preview, err := json.Marshal(raw) + if err != nil { + return httperr.MalformedBody(err) + } + + ctx := c.Request().Context() + if err := h.DBService.Survey123PreviewCreateOrUpdate(ctx, db.Survey123PreviewCreateOrUpdateParams{ + Survey123ID: survey123ID, + Preview: string(preview), + UpdatedAt: time.Now(), + }); err != nil { + return httperr.InternalServerError(err) + } + + eq, err := h.DBService.Survey123EquivalencyTableRowList(ctx, survey123ID) + if err != nil { + return httperr.ServerErrorOrNotFound(err) + } + + var et string + if err := json.Unmarshal(raw["eventType"], &et); err != nil { + return httperr.MalformedBody(err) + } + + var se []dto.Survey123Edits + if err := json.Unmarshal(raw["applyEdits"], &se); err != nil { + return httperr.MalformedBody(err) + } + + sp := dto.Survey123Payload{EventType: et, Edits: se} + + if err := h.DBService.Survey123MeasurementCreateOrUpdateBatch(ctx, survey123ID, sp, eq); err != nil { + return httperr.InternalServerError(err) + } + + return c.NoContent(http.StatusCreated) +} diff --git a/api/internal/handler/survey123_test.go b/api/internal/handler/survey123_test.go new file mode 100644 index 00000000..524b6df8 --- /dev/null +++ b/api/internal/handler/survey123_test.go @@ -0,0 +1,140 @@ +package 
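The telemetry handler above peels `eventType` and `applyEdits` out of the raw body before storing the whole thing as the preview, so the webhook payload it accepts looks roughly like the following. The field names come from the `json.Unmarshal` calls and the dto tags earlier in this diff; the values themselves are invented:

```go
// Sketch only: a minimal body accepted by CreateOrUpdateSurvey123Measurements.
const sampleSurvey123Webhook = `{
  "eventType": "addData",
  "applyEdits": [
    {
      "adds": [
        {
          "objectId": 1,
          "attributes": { "test1__battery": 12.4, "test1__temperature": 21.0 },
          "geometry": { "x": -80.8, "y": 26.7 }
        }
      ]
    }
  ]
}`
```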
handler_test + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/xeipuuv/gojsonschema" +) + +const survey123EquivalencyTableRowSchema = `{ + "type": "object", + "properties": { + "field_name": { "type": "string" }, + "display_name": { "type": "string" }, + "instrument_id": { "type": ["string", "null"] }, + "timeseries_id": { "type": ["string", "null"] } + } +}` + +var survey123Schema = fmt.Sprintf(`{ + "type": "object", + "properties": { + "id": { "type": "string" }, + "name": { "type": "string" }, + "project_id": { "type": "string" }, + "created_by": { "type": "string" }, + "created_by_username": { "type": "string" }, + "created_at": { "type": "string" }, + "updated_by": { "type": ["string", "null"] }, + "updated_by_username": { "type": ["string", "null"] }, + "updated_at": { "type": ["string", "null"] }, + "slug": { "type": "string" }, + "errors": { "type": "array", "items": { "type": "string" } }, + "fields": { "type": "array", "items": %s } + }, + "required": [ + "id", + "name", + "project_id", + "created_by", + "created_by_username", + "created_at", + "fields", + "slug", + "errors" + ] +}`, survey123EquivalencyTableRowSchema) + +var survey123ObjectLoader = gojsonschema.NewStringLoader(survey123Schema) +var survey123ArrayLoader = gojsonschema.NewStringLoader(fmt.Sprintf(`{ + "type": "array", + "items": %s +}`, survey123Schema)) + +const survey123PreviewSchema = `{ + "type": "object", + "properties": { + "survey123_id": { "type": "string" }, + "updated_at": { "type": "string" }, + "preview": { "type": "string" } + } +}` + +var survey123PreviewLoader = gojsonschema.NewStringLoader(survey123PreviewSchema) + +const ( + testSurvey123ID = "a2e19d85-4c64-4e99-b93a-4f4f56a718cf" +) + +const createSurvey123Body = `{ + "project_id": "5b6f4f37-7755-4cf9-bd02-94f1e9bc5984", + "name": "Test Create Survey123", + "rows": [] +}` + +const updateSurvey123Body = `{ + "id": "a2e19d85-4c64-4e99-b93a-4f4f56a718cf", + "name": "Updated name", + "rows": [ + { + "field_name": "test1__battery", + "display_name": "battery", + "instrument_id": "d8c66ef9-06f0-4d52-9233-f3778e0624f0", + "timeseries_id": "c3c00251-12fb-42a1-9d49-cdb269bb3039" + }, + { + "field_name": "test1__temperature", + "display_name": "temperature", + "instrument_id": "d8c66ef9-06f0-4d52-9233-f3778e0624f0", + "timeseries_id": "e45a9620-a431-4b70-af97-a4e185eb7311" + } + ] +}` + +func TestSurvey123(t *testing.T) { + arrSchema, err := gojsonschema.NewSchema(survey123ArrayLoader) + assert.Nil(t, err) + previewObjSchema, err := gojsonschema.NewSchema(survey123PreviewLoader) + assert.Nil(t, err) + + tests := []HTTPTest{ + { + Name: "CreateSurvey123", + URL: fmt.Sprintf("/projects/%s/survey123", testProjectID), + Method: http.MethodPost, + Body: createSurvey123Body, + ExpectedStatus: http.StatusCreated, + }, + { + Name: "ListSurvey123sForProject", + URL: fmt.Sprintf("/projects/%s/survey123", testProjectID), + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + ExpectedSchema: arrSchema, + }, + { + Name: "GetSurvey123Preview", + URL: fmt.Sprintf("/projects/%s/survey123/%s/previews", testProjectID, testSurvey123ID), + Method: http.MethodGet, + ExpectedStatus: http.StatusOK, + ExpectedSchema: previewObjSchema, + }, + { + Name: "UpdateSurvey123", + URL: fmt.Sprintf("/projects/%s/survey123/%s", testProjectID, testSurvey123ID), + Method: http.MethodPut, + Body: updateSurvey123Body, + ExpectedStatus: http.StatusOK, + }, + { + Name: "DeleteSurvey123", + URL: fmt.Sprintf("/projects/%s/survey123/%s", 
testProjectID, testSurvey123ID), + Method: http.MethodDelete, + ExpectedStatus: http.StatusOK, + }} + + RunAll(t, tests) +} diff --git a/api/internal/handler/timeseries.go b/api/internal/handler/timeseries.go index 69982df0..107977d2 100644 --- a/api/internal/handler/timeseries.go +++ b/api/internal/handler/timeseries.go @@ -36,7 +36,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { return c.JSON(http.StatusOK, t) } -// ListInstrumentTimeseries godoc +// ListTimeseriesForInstrument godoc // // @Summary lists timeseries for an instrument // @Tags timeseries @@ -48,7 +48,7 @@ func (h *ApiHandler) GetTimeseries(c echo.Context) error { // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/instruments/{instrument_id}/timeseries [get] -func (h *ApiHandler) ListInstrumentTimeseries(c echo.Context) error { +func (h *ApiHandler) ListTimeseriesForInstrument(c echo.Context) error { nID, err := uuid.Parse(c.Param("instrument_id")) if err != nil { return httperr.MalformedID(err) @@ -83,7 +83,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { return c.JSON(http.StatusOK, tt) } -// ListProjectTimeseries godoc +// ListTimeseriesForProject godoc // // @Summary lists all timeseries for a single project // @Tags timeseries @@ -94,7 +94,7 @@ func (h *ApiHandler) ListInstrumentGroupTimeseries(c echo.Context) error { // @Failure 404 {object} echo.HTTPError // @Failure 500 {object} echo.HTTPError // @Router /projects/{project_id}/timeseries [get] -func (h *ApiHandler) ListProjectTimeseries(c echo.Context) error { +func (h *ApiHandler) ListTimeseriesForProject(c echo.Context) error { pID, err := uuid.Parse(c.Param("project_id")) if err != nil { return httperr.MalformedID(err) diff --git a/api/internal/handler/uploader_test.go b/api/internal/handler/uploader_test.go new file mode 100644 index 00000000..6ba90b67 --- /dev/null +++ b/api/internal/handler/uploader_test.go @@ -0,0 +1,3 @@ +package handler_test + +// TODO diff --git a/api/internal/server/api.go b/api/internal/server/api.go index e66b6de3..8fecd4fc 100644 --- a/api/internal/server/api.go +++ b/api/internal/server/api.go @@ -95,7 +95,7 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.POST("/my_alerts/:alert_id/read", h.DoAlertRead) r.private.POST("/my_alerts/:alert_id/unread", h.DoAlertUnread) - //AlertConfig + // AlertConfig r.public.GET("/projects/:project_id/alert_configs", h.ListAlertConfigsForProject) r.public.GET("/projects/:project_id/instruments/:instrument_id/alert_configs", h.ListInstrumentAlertConfigs) r.public.GET("/projects/:project_id/alert_configs/:alert_config_id", h.GetAlertConfig) @@ -189,12 +189,6 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/projects/:project_id/instruments/:instrument_id/assignments", h.UpdateInstrumentProjectAssignments) r.private.PUT("/projects/:project_id/instruments/assignments", h.UpdateProjectInstrumentAssignments) - // InstrumentConstant - r.public.GET("/projects/:project_id/instruments/:instrument_id/constants", h.ListInstrumentConstants) - r.private.POST("/projects/:project_id/instruments/:instrument_id/constants", h.CreateInstrumentConstants) - r.private.PUT("/projects/:project_id/instruments/:instrument_id/constants/:timeseries_id", h.UpdateTimeseries) - r.private.DELETE("/projects/:project_id/instruments/:instrument_id/constants/:timeseries_id", h.DeleteInstrumentConstant) - // InstrumentGroup r.public.GET("/instrument_groups", 
h.ListInstrumentGroups) r.public.GET("/instrument_groups/:instrument_group_id", h.GetInstrumentGroup) @@ -319,12 +313,19 @@ func (r *ApiServer) RegisterRoutes(h *handler.ApiHandler) { r.private.PUT("/submittals/:submittal_id/verify_missing", h.VerifyMissingSubmittal) r.private.PUT("/alert_configs/:alert_config_id/submittals/verify_missing", h.VerifyMissingAlertConfigSubmittals) + // Survey123 + r.public.GET("/projects/:project_id/survey123", h.ListSurvey123sForProject) + r.public.GET("/projects/:project_id/survey123/:survey123_id/previews", h.GetSurvey123Preview) + r.private.POST("/projects/:project_id/survey123", h.CreateSurvey123) + r.private.PUT("/projects/:project_id/survey123/:survey123_id", h.UpdateSurvey123) + r.private.DELETE("/projects/:project_id/survey123/:survey123_id", h.DeleteSurvey123) + // Timeseries // TODO: Delete timeseries endpoints without project context in URL r.public.GET("/timeseries/:timeseries_id", h.GetTimeseries) r.public.GET("/instruments/:instrument_id/timeseries/:timeseries_id", h.GetTimeseries) - r.public.GET("/projects/:project_id/timeseries", h.ListProjectTimeseries) - r.public.GET("/projects/:project_id/instruments/:instrument_id/timeseries", h.ListInstrumentTimeseries) + r.public.GET("/projects/:project_id/timeseries", h.ListTimeseriesForProject) + r.public.GET("/projects/:project_id/instruments/:instrument_id/timeseries", h.ListTimeseriesForInstrument) r.private.POST("/timeseries", h.CreateTimeseries) r.private.PUT("/timeseries/:timeseries_id", h.UpdateTimeseries) diff --git a/api/internal/server/docs/openapi.json b/api/internal/server/docs/openapi.json index 494be041..ecd3432e 100644 --- a/api/internal/server/docs/openapi.json +++ b/api/internal/server/docs/openapi.json @@ -1553,7 +1553,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/db.PgTimezoneNamesListRow" + "type" : "string" }, "type" : "array" } @@ -8000,253 +8000,6 @@ "x-codegen-request-body-name" : "project_ids" } }, - "/projects/{project_id}/instruments/{instrument_id}/constants" : { - "get" : { - "parameters" : [ { - "description" : "project uuid", - "in" : "path", - "name" : "project_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "instrument uuid", - "in" : "path", - "name" : "instrument_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - } ], - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/db.VTimeseries" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "summary" : "lists constants for a given instrument", - "tags" : [ "instrument-constant" ] - }, - "post" : { - "parameters" : [ { - "description" : "project uuid", - "in" : "path", - "name" : "project_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "instrument uuid", - "in" : "path", - "name" : "instrument_id", - "required" : true, - 
"schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" - } - } ], - "requestBody" : { - "content" : { - "*/*" : { - "schema" : { - "$ref" : "#/components/schemas/dto.TimeseriesCollectionItems" - } - } - }, - "description" : "timeseries collection items payload", - "required" : true - }, - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "items" : { - "$ref" : "#/components/schemas/db.TimeseriesCreateBatchRow" - }, - "type" : "array" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "creates instrument constants (i.e. timeseries)", - "tags" : [ "instrument-constant" ], - "x-codegen-request-body-name" : "timeseries_collection_items" - } - }, - "/projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id}" : { - "delete" : { - "parameters" : [ { - "description" : "project uuid", - "in" : "path", - "name" : "project_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "instrument uuid", - "in" : "path", - "name" : "instrument_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "timeseries uuid", - "in" : "path", - "name" : "timeseries_id", - "required" : true, - "schema" : { - "format" : "uuid", - "type" : "string" - } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" - } - } ], - "responses" : { - "200" : { - "content" : { - "application/json" : { - "schema" : { - "additionalProperties" : true, - "type" : "object" - } - } - }, - "description" : "OK" - }, - "400" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Bad Request" - }, - "404" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Not Found" - }, - "500" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/echo.HTTPError" - } - } - }, - "description" : "Internal Server Error" - } - }, - "security" : [ { - "Bearer" : [ ] - } ], - "summary" : "removes a timeseries as an instrument constant", - "tags" : [ "instrument-constant" ] - } - }, "/projects/{project_id}/instruments/{instrument_id}/evaluations" : { "get" : { "parameters" : [ { @@ -11092,7 +10845,7 @@ "tags" : [ "submittal" ] } }, - "/projects/{project_id}/timeseries" : { + "/projects/{project_id}/survey123" : { "get" : { "parameters" : [ { "description" : "project uuid", @@ -11110,7 +10863,7 @@ "application/json" : { "schema" : { "items" : { - "$ref" : "#/components/schemas/db.VTimeseries" + "$ref" : "#/components/schemas/dto.Survey123" }, "type" : "array" } @@ -11149,11 +10902,9 @@ "description" : "Internal Server Error" } }, - "summary" : "lists all timeseries for a single project", 
- "tags" : [ "timeseries" ] - } - }, - "/projects/{project_id}/timeseries_measurements" : { + "summary" : "lists Survey123 connections for a project", + "tags" : [ "survey123" ] + }, "post" : { "parameters" : [ { "description" : "project uuid", @@ -11164,20 +10915,380 @@ "format" : "uuid", "type" : "string" } - }, { - "description" : "api key", - "in" : "query", - "name" : "key", - "schema" : { - "type" : "string" - } } ], "requestBody" : { "content" : { "application/json" : { "schema" : { - "items" : { - "$ref" : "#/components/schemas/dto.MeasurementCollection" + "$ref" : "#/components/schemas/dto.Survey123" + } + } + }, + "description" : "survey123 payload", + "required" : true + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : { + "type" : "string" + }, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "creates a Survey123 connection with equivalency table mappings", + "tags" : [ "survey123" ], + "x-codegen-request-body-name" : "survey123" + } + }, + "/projects/{project_id}/survey123/{survey123_id}" : { + "delete" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "survey123 uuid", + "in" : "path", + "name" : "survey123_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "additionalProperties" : { + "type" : "string" + }, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "deletes a Survey123 connection with equivalency table mappings", + "tags" : [ "survey123" ] + }, + "put" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "survey123 uuid", + "in" : "path", + "name" : "survey123_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/dto.Survey123" + } + } + }, + "description" : "survey123 payload", + "required" : true + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" 
: { + "additionalProperties" : { + "type" : "string" + }, + "type" : "object" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "security" : [ { + "Bearer" : [ ] + } ], + "summary" : "updates a Survey123 connection with equivalency table mappings", + "tags" : [ "survey123" ], + "x-codegen-request-body-name" : "survey123" + } + }, + "/projects/{project_id}/survey123/{survey123_id}/previews" : { + "get" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "survey123 uuid", + "in" : "path", + "name" : "survey123_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "type" : "string" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "gets the most recent Survey123 raw json payload sent from the webhook API", + "tags" : [ "survey123" ] + } + }, + "/projects/{project_id}/timeseries" : { + "get" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "items" : { + "$ref" : "#/components/schemas/db.VTimeseries" + }, + "type" : "array" + } + } + }, + "description" : "OK" + }, + "400" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Bad Request" + }, + "404" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Not Found" + }, + "500" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/echo.HTTPError" + } + } + }, + "description" : "Internal Server Error" + } + }, + "summary" : "lists all timeseries for a single project", + "tags" : [ "timeseries" ] + } + }, + "/projects/{project_id}/timeseries_measurements" : { + "post" : { + "parameters" : [ { + "description" : "project uuid", + "in" : "path", + "name" : "project_id", + "required" : true, + "schema" : { + "format" : "uuid", + "type" : "string" + } + }, { + "description" : "api key", + "in" : "query", + "name" : "key", + "schema" : { + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "application/json" : { + "schema" : 
{ + "items" : { + "$ref" : "#/components/schemas/dto.MeasurementCollection" }, "type" : "array" } @@ -13159,23 +13270,6 @@ }, "type" : "object" }, - "db.PgTimezoneNamesListRow" : { - "properties" : { - "abbrev" : { - "type" : "string" - }, - "is_dst" : { - "type" : "boolean" - }, - "name" : { - "type" : "string" - }, - "utc_offset" : { - "type" : "string" - } - }, - "type" : "object" - }, "db.PlotConfigMeasurementListBullseyeRow" : { "properties" : { "time" : { @@ -13446,6 +13540,9 @@ "field_name" : { "type" : "string" }, + "instrument_field_name" : { + "type" : "string" + }, "timeseries_id" : { "type" : "string" }, @@ -13456,9 +13553,9 @@ "type" : "object" }, "db.UploaderConfigType" : { - "enum" : [ "csv", "dux", "toa5" ], + "enum" : [ "csv", "dux", "toa5", "xlsx" ], "type" : "string", - "x-enum-varnames" : [ "UploaderConfigTypeCsv", "UploaderConfigTypeDux", "UploaderConfigTypeToa5" ] + "x-enum-varnames" : [ "UploaderConfigTypeCsv", "UploaderConfigTypeDux", "UploaderConfigTypeToa5", "UploaderConfigTypeXlsx" ] }, "db.VAlert" : { "properties" : { @@ -14535,6 +14632,12 @@ "id" : { "type" : "string" }, + "instrument_field" : { + "type" : "string" + }, + "instrument_field_enabled" : { + "type" : "boolean" + }, "masked_field" : { "type" : "string" }, @@ -14544,6 +14647,9 @@ "name" : { "type" : "string" }, + "prefer_day_first" : { + "type" : "boolean" + }, "project_id" : { "type" : "string" }, @@ -14576,6 +14682,9 @@ }, "validated_field_enabled" : { "type" : "boolean" + }, + "xlsx_sheet_name" : { + "type" : "string" } }, "type" : "object" @@ -15878,6 +15987,70 @@ }, "type" : "object" }, + "dto.Survey123" : { + "properties" : { + "created_at" : { + "type" : "string" + }, + "created_by" : { + "type" : "string" + }, + "created_by_username" : { + "type" : "string" + }, + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "id" : { + "type" : "string" + }, + "name" : { + "type" : "string" + }, + "project_id" : { + "type" : "string" + }, + "rows" : { + "items" : { + "$ref" : "#/components/schemas/dto.Survey123EquivalencyTableRow" + }, + "type" : "array" + }, + "slug" : { + "type" : "string" + }, + "updated_by" : { + "type" : "string" + }, + "updated_by_username" : { + "type" : "string" + }, + "updatedd_at" : { + "type" : "string" + } + }, + "type" : "object" + }, + "dto.Survey123EquivalencyTableRow" : { + "properties" : { + "display_name" : { + "type" : "string" + }, + "field_name" : { + "type" : "string" + }, + "instrument_id" : { + "type" : "string" + }, + "timeseries_id" : { + "type" : "string" + } + }, + "type" : "object" + }, "dto.TextOption" : { "properties" : { "enabled" : { @@ -16054,12 +16227,21 @@ "created_by_username" : { "type" : "string" }, + "depth_based_instrument_id" : { + "type" : "string" + }, "description" : { "type" : "string" }, "id" : { "type" : "string" }, + "instrument_field" : { + "type" : "string" + }, + "instrument_field_enabled" : { + "type" : "boolean" + }, "masked_field" : { "type" : "string" }, @@ -16101,6 +16283,9 @@ }, "validated_field_enabled" : { "type" : "boolean" + }, + "xlsx_sheet_name" : { + "type" : "string" } }, "type" : "object" @@ -16110,6 +16295,9 @@ "field_name" : { "type" : "string" }, + "instrument_field_name" : { + "type" : "string" + }, "timeseries_id" : { "type" : "string" } @@ -16117,9 +16305,9 @@ "type" : "object" }, "dto.UploaderConfigType" : { - "enum" : [ "csv", "dux", "toa5" ], + "enum" : [ "csv", "dux", "toa5", "xlsx" ], "type" : "string", - "x-enum-varnames" : [ "CSV", "DUX", "TOA5" ] + "x-enum-varnames" : [ "CSV", 
"DUX", "TOA5", "XLSX" ] }, "service.AggregatePlotConfigMeasurementsContourPlot" : { "properties" : { diff --git a/api/internal/server/docs/openapi.yaml b/api/internal/server/docs/openapi.yaml index 45968ac2..44ab7f58 100644 --- a/api/internal/server/docs/openapi.yaml +++ b/api/internal/server/docs/openapi.yaml @@ -1035,7 +1035,7 @@ paths: application/json: schema: items: - $ref: '#/components/schemas/db.PgTimezoneNamesListRow' + type: string type: array description: OK "400": @@ -5338,174 +5338,6 @@ paths: tags: - instrument x-codegen-request-body-name: project_ids - /projects/{project_id}/instruments/{instrument_id}/constants: - get: - parameters: - - description: project uuid - in: path - name: project_id - required: true - schema: - format: uuid - type: string - - description: instrument uuid - in: path - name: instrument_id - required: true - schema: - format: uuid - type: string - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/db.VTimeseries' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - summary: lists constants for a given instrument - tags: - - instrument-constant - post: - parameters: - - description: project uuid - in: path - name: project_id - required: true - schema: - format: uuid - type: string - - description: instrument uuid - in: path - name: instrument_id - required: true - schema: - format: uuid - type: string - - description: api key - in: query - name: key - schema: - type: string - requestBody: - content: - '*/*': - schema: - $ref: '#/components/schemas/dto.TimeseriesCollectionItems' - description: timeseries collection items payload - required: true - responses: - "200": - content: - application/json: - schema: - items: - $ref: '#/components/schemas/db.TimeseriesCreateBatchRow' - type: array - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - security: - - Bearer: [] - summary: creates instrument constants (i.e. 
timeseries) - tags: - - instrument-constant - x-codegen-request-body-name: timeseries_collection_items - /projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id}: - delete: - parameters: - - description: project uuid - in: path - name: project_id - required: true - schema: - format: uuid - type: string - - description: instrument uuid - in: path - name: instrument_id - required: true - schema: - format: uuid - type: string - - description: timeseries uuid - in: path - name: timeseries_id - required: true - schema: - format: uuid - type: string - - description: api key - in: query - name: key - schema: - type: string - responses: - "200": - content: - application/json: - schema: - additionalProperties: true - type: object - description: OK - "400": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Bad Request - "404": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Not Found - "500": - content: - application/json: - schema: - $ref: '#/components/schemas/echo.HTTPError' - description: Internal Server Error - security: - - Bearer: [] - summary: removes a timeseries as an instrument constant - tags: - - instrument-constant /projects/{project_id}/instruments/{instrument_id}/evaluations: get: parameters: @@ -7425,6 +7257,246 @@ paths: summary: lists all submittals for a project tags: - submittal + /projects/{project_id}/survey123: + get: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/dto.Survey123' + type: array + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: lists Survey123 connections for a project + tags: + - survey123 + post: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/dto.Survey123' + description: survey123 payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: + type: string + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: creates a Survey123 connection with equivalency table mappings + tags: + - survey123 + x-codegen-request-body-name: survey123 + /projects/{project_id}/survey123/{survey123_id}: + delete: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: survey123 uuid + in: path + name: survey123_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: 
+ additionalProperties: + type: string + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: deletes a Survey123 connection with equivalency table mappings + tags: + - survey123 + put: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: survey123 uuid + in: path + name: survey123_id + required: true + schema: + format: uuid + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/dto.Survey123' + description: survey123 payload + required: true + responses: + "200": + content: + application/json: + schema: + additionalProperties: + type: string + type: object + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + security: + - Bearer: [] + summary: updates a Survey123 connection with equivalency table mappings + tags: + - survey123 + x-codegen-request-body-name: survey123 + /projects/{project_id}/survey123/{survey123_id}/previews: + get: + parameters: + - description: project uuid + in: path + name: project_id + required: true + schema: + format: uuid + type: string + - description: survey123 uuid + in: path + name: survey123_id + required: true + schema: + format: uuid + type: string + responses: + "200": + content: + application/json: + schema: + type: string + description: OK + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Bad Request + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Not Found + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/echo.HTTPError' + description: Internal Server Error + summary: gets the most recent Survey123 raw json payload sent from the webhook + API + tags: + - survey123 /projects/{project_id}/timeseries: get: parameters: @@ -8983,22 +9055,6 @@ components: additionalProperties: type: number type: object - db.PgTimezoneNamesListRow: - example: - utc_offset: utc_offset - name: name - abbrev: abbrev - is_dst: true - properties: - abbrev: - type: string - is_dst: - type: boolean - name: - type: string - utc_offset: - type: string - type: object db.PlotConfigMeasurementListBullseyeRow: example: x: "{}" @@ -9275,11 +9331,14 @@ components: db.UploaderConfigMapping: example: timeseries_id: timeseries_id + instrument_field_name: instrument_field_name uploader_config_id: uploader_config_id field_name: field_name properties: field_name: type: string + instrument_field_name: + type: string timeseries_id: type: string uploader_config_id: @@ -9290,11 +9349,13 @@ components: - csv - dux - toa5 + - xlsx type: string x-enum-varnames: - UploaderConfigTypeCsv - UploaderConfigTypeDux - UploaderConfigTypeToa5 + - UploaderConfigTypeXlsx db.VAlert: example: instruments: 
@@ -10451,29 +10512,33 @@ components:
       type: object
     db.VUploaderConfig:
       example:
-        validated_field: validated_field
         created_at: created_at
         description: description
-        row_offset: 6
         comment_field: comment_field
         type: csv
-        created_by: created_by
         updated_by_username: updated_by_username
-        time_field: time_field
+        xlsx_sheet_name: xlsx_sheet_name
         masked_field: masked_field
         tz_name: tz_name
         updated_at: updated_at
         comment_field_enabled: true
+        instrument_field_enabled: true
         project_id: project_id
         column_offset: 0
-        name: name
-        updated_by: updated_by
-        masked_field_enabled: true
         created_by_username: created_by_username
         id: id
-        validated_field_enabled: true
         depth_based_instrument_id: depth_based_instrument_id
         slug: slug
+        instrument_field: instrument_field
+        validated_field: validated_field
+        prefer_day_first: true
+        row_offset: 6
+        created_by: created_by
+        time_field: time_field
+        name: name
+        updated_by: updated_by
+        masked_field_enabled: true
+        validated_field_enabled: true
       properties:
         column_offset:
           type: integer
@@ -10493,12 +10558,18 @@ components:
           type: string
         id:
           type: string
+        instrument_field:
+          type: string
+        instrument_field_enabled:
+          type: boolean
         masked_field:
           type: string
         masked_field_enabled:
           type: boolean
         name:
           type: string
+        prefer_day_first:
+          type: boolean
         project_id:
           type: string
         row_offset:
@@ -10521,6 +10592,8 @@ components:
           type: string
         validated_field_enabled:
           type: boolean
+        xlsx_sheet_name:
+          type: string
       type: object
     dto.AlertConfig:
       properties:
@@ -11520,6 +11593,76 @@ components:
         z_timeseries_id:
           type: string
       type: object
+    dto.Survey123:
+      example:
+        updated_by_username: updated_by_username
+        project_id: project_id
+        name: name
+        updated_by: updated_by
+        created_at: created_at
+        updated_at: updated_at
+        created_by_username: created_by_username
+        id: id
+        rows:
+        - timeseries_id: timeseries_id
+          display_name: display_name
+          instrument_id: instrument_id
+          field_name: field_name
+        - timeseries_id: timeseries_id
+          display_name: display_name
+          instrument_id: instrument_id
+          field_name: field_name
+        created_by: created_by
+        errors:
+        - errors
+        - errors
+        slug: slug
+      properties:
+        created_at:
+          type: string
+        created_by:
+          type: string
+        created_by_username:
+          type: string
+        errors:
+          items:
+            type: string
+          type: array
+        id:
+          type: string
+        name:
+          type: string
+        project_id:
+          type: string
+        rows:
+          items:
+            $ref: '#/components/schemas/dto.Survey123EquivalencyTableRow'
+          type: array
+        slug:
+          type: string
+        updated_by:
+          type: string
+        updated_by_username:
+          type: string
+        updated_at:
+          type: string
+      type: object
+    dto.Survey123EquivalencyTableRow:
+      example:
+        timeseries_id: timeseries_id
+        display_name: display_name
+        instrument_id: instrument_id
+        field_name: field_name
+      properties:
+        display_name:
+          type: string
+        field_name:
+          type: string
+        instrument_id:
+          type: string
+        timeseries_id:
+          type: string
+      type: object
     dto.TextOption:
       properties:
         enabled:
@@ -11694,10 +11837,16 @@ components:
           type: string
         created_by_username:
           type: string
+        depth_based_instrument_id:
+          type: string
         description:
           type: string
         id:
           type: string
+        instrument_field:
+          type: string
+        instrument_field_enabled:
+          type: boolean
         masked_field:
           type: string
         masked_field_enabled:
@@ -11726,14 +11875,19 @@ components:
           type: string
         validated_field_enabled:
           type: boolean
+        xlsx_sheet_name:
+          type: string
       type: object
     dto.UploaderConfigMapping:
       example:
         timeseries_id: timeseries_id
+        instrument_field_name: instrument_field_name
         field_name: field_name
       properties:
         field_name:
           type: string
+
instrument_field_name: + type: string timeseries_id: type: string type: object @@ -11742,11 +11896,13 @@ components: - csv - dux - toa5 + - xlsx type: string x-enum-varnames: - CSV - DUX - TOA5 + - XLSX service.AggregatePlotConfigMeasurementsContourPlot: example: x: diff --git a/api/internal/service/datalogger_parser.go b/api/internal/service/datalogger_parser.go deleted file mode 100644 index 7387707d..00000000 --- a/api/internal/service/datalogger_parser.go +++ /dev/null @@ -1,96 +0,0 @@ -package service - -import ( - "context" - "encoding/csv" - "io" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" - "github.com/google/uuid" -) - -// datalogger toa5/dat parser -func (s DBService) TimeseriesMeasurementCreateBatchForDataloggerFromTOA5File(ctx context.Context, r io.Reader) error { - tx, err := s.db.Begin(ctx) - if err != nil { - return err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - reader := csv.NewReader(r) - - envHeader, err := reader.Read() - if err != nil { - return err - } - fieldHeader, err := reader.Read() - if err != nil { - return err - } - // skip units header - _, err = reader.Read() - if err != nil { - return err - } - // skip process header - _, err = reader.Read() - if err != nil { - return err - } - - meta := dto.Environment{ - // StationName: envHeader[1], - Model: envHeader[2], - SerialNo: envHeader[3], - // OSVersion: envHeader[4], - // ProgName: envHeader[5], - TableName: envHeader[6], - } - - dl, err := qtx.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{ - Model: &meta.Model, - Sn: meta.SerialNo, - }) - if err != nil { - return err - } - tableID, err := qtx.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{ - DataloggerID: dl.ID, - TableName: meta.TableName, - }) - if err != nil { - return err - } - - // first two columns are timestamp and record number - // we only want to collect the measurement fields here - fields := make([]string, len(fieldHeader)-2) - for i := 2; i < len(fieldHeader); i++ { - fields[i] = fieldHeader[i] - } - - eqt, err := qtx.EquivalencyTableGet(ctx, tableID) - if err != nil { - return err - } - - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, eqtRow := range eqt.Fields { - if eqtRow.TimeseriesID == nil { - continue - } - fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID - } - - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, qtx, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ - fields: fields, - fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, - timezone: "UTC", - }); err != nil { - return err - } - - return tx.Commit(ctx) -} diff --git a/api/internal/service/datalogger_telemetry.go b/api/internal/service/datalogger_telemetry.go index eef72bfd..f56c1343 100644 --- a/api/internal/service/datalogger_telemetry.go +++ b/api/internal/service/datalogger_telemetry.go @@ -19,7 +19,7 @@ import ( func (s DBService) DataloggerTablePreviewCreate(ctx context.Context, prv dto.DataloggerTablePreview) error { return s.Queries.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ DataloggerTableID: prv.DataloggerTableID, - UpdatedAt: prv.UpdatedAt, + UpdatedAt: prv.UpdatedAt, Preview: prv.Preview, }) } @@ -54,7 +54,7 @@ func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerI DataloggerID: dataloggerID, TableName: tableName, Preview: prv.Preview, - UpdatedAt: prv.UpdatedAt, + UpdatedAt: prv.UpdatedAt, }); err != nil { if !errors.Is(err, 
sql.ErrNoRows) { return uuid.Nil, err @@ -63,7 +63,7 @@ func (s DBService) DataloggerTablePreviewUpdate(ctx context.Context, dataloggerI if err := qtx.DataloggerTablePreviewCreate(ctx, db.DataloggerTablePreviewCreateParams{ DataloggerTableID: prv.DataloggerTableID, Preview: prv.Preview, - UpdatedAt: prv.UpdatedAt, + UpdatedAt: prv.UpdatedAt, }); err != nil { } } @@ -201,7 +201,7 @@ func (s DBService) TimeseriesMeasurementCreateOrUpdateDataloggerTOA5Upload(ctx c if err != nil { return err } - t, err := time.Parse(record[0], time.RFC3339) + t, err := time.Parse(time.RFC3339, record[0]) if err != nil { return err } diff --git a/api/internal/service/equivalency_table.go b/api/internal/service/equivalency_table.go index a8915528..fb6d0569 100644 --- a/api/internal/service/equivalency_table.go +++ b/api/internal/service/equivalency_table.go @@ -6,11 +6,11 @@ import ( "github.com/USACE/instrumentation-api/api/internal/db" "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" ) func (s DBService) EquivalencyTableCreateOrUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { var a db.VDataloggerEquivalencyTable - tx, err := s.db.Begin(ctx) if err != nil { return a, err @@ -18,29 +18,38 @@ func (s DBService) EquivalencyTableCreateOrUpdate(ctx context.Context, t dto.Equ defer txDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - for _, r := range t.Fields { + tsIDs := make([]uuid.UUID, 0) + args := make([]db.EquivalencyTableCreateOrUpdateBatchParams, len(t.Fields)) + for idx, r := range t.Fields { if r.TimeseriesID != nil { - valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) - if err != nil { - return a, err - } - if !valid { - return a, errors.New("equivalency table timeseries invalid") - } + tsIDs = append(tsIDs, *r.TimeseriesID) } - if err := qtx.EquivalencyTableCreateOrUpdate(ctx, db.EquivalencyTableCreateOrUpdateParams{ + args[idx] = db.EquivalencyTableCreateOrUpdateBatchParams{ DataloggerID: t.DataloggerID, DataloggerTableID: &t.DataloggerTableID, FieldName: r.FieldName, DisplayName: &r.DisplayName, InstrumentID: r.InstrumentID, TimeseriesID: r.TimeseriesID, - }); err != nil { + } + } + + if len(tsIDs) != 0 { + valid, err := qtx.TimeseriesGetIsStandard(ctx, tsIDs) + if err != nil { return a, err } + if !valid { + return a, errors.New("one or more timeseries ids in the equivalency table are invalid") + } } - eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) + qtx.EquivalencyTableCreateOrUpdateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err + } + + a, err = qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) if err != nil { return a, err } @@ -49,7 +58,7 @@ func (s DBService) EquivalencyTableCreateOrUpdate(ctx context.Context, t dto.Equ return a, err } - return eqt, nil + return a, nil } func (s DBService) EquivalencyTableUpdate(ctx context.Context, t dto.EquivalencyTable) (db.VDataloggerEquivalencyTable, error) { @@ -59,33 +68,43 @@ func (s DBService) EquivalencyTableUpdate(ctx context.Context, t dto.Equivalency return a, err } defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - for _, r := range t.Fields { + tsIDs := make([]uuid.UUID, 0) + args := make([]db.EquivalencyTableUpdateBatchParams, len(t.Fields)) + for idx, r := range t.Fields { if r.TimeseriesID != nil { - valid, err := qtx.EquivalencyTableTimeseriesGetIsValid(ctx, *r.TimeseriesID) - if err != nil { - return a, err - } - if !valid { - return a, errors.New("equivalency table timeseries invalid") - } + tsIDs = 
append(tsIDs, *r.TimeseriesID) } - if err := qtx.EquivalencyTableUpdate(ctx, db.EquivalencyTableUpdateParams{ + args[idx] = db.EquivalencyTableUpdateBatchParams{ ID: r.ID, FieldName: r.FieldName, DisplayName: &r.DisplayName, - }); err != nil { + } + } + + if len(tsIDs) != 0 { + valid, err := qtx.TimeseriesGetIsStandard(ctx, tsIDs) + if err != nil { return a, err } + if !valid { + return a, errors.New("one or more timeseries ids in the equivalency table are invalid") + } } - eqt, err := qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) + qtx.EquivalencyTableUpdateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return a, err + } + a, err = qtx.EquivalencyTableGet(ctx, t.DataloggerTableID) + if err != nil { + return a, err + } if err := tx.Commit(ctx); err != nil { return a, err } - return eqt, nil + return a, nil } diff --git a/api/internal/service/heartbeat.go b/api/internal/service/heartbeat.go index 025d8423..276da4ee 100644 --- a/api/internal/service/heartbeat.go +++ b/api/internal/service/heartbeat.go @@ -6,11 +6,11 @@ import ( ) type Healthcheck struct { - Status string + Status string `json:"status"` } type Heartbeat struct { - Time time.Time + Time time.Time `json:"time"` } func (s DBService) HeartbeatCreate(ctx context.Context, argTime time.Time) (Heartbeat, error) { diff --git a/api/internal/service/instrument_constant.go b/api/internal/service/instrument_constant.go deleted file mode 100644 index 3b98947d..00000000 --- a/api/internal/service/instrument_constant.go +++ /dev/null @@ -1,56 +0,0 @@ -package service - -import ( - "context" - "errors" - - "github.com/USACE/instrumentation-api/api/internal/db" - "github.com/USACE/instrumentation-api/api/internal/dto" -) - -func (s DBService) InstrumentConstantCreateBatch(ctx context.Context, tt []dto.Timeseries) ([]db.TimeseriesCreateBatchRow, error) { - tx, err := s.db.Begin(ctx) - if err != nil { - return nil, err - } - defer txDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - createTimeseriesParams := make([]db.TimeseriesCreateBatchParams, len(tt)) - for idx, t := range tt { - createTimeseriesParams[idx] = db.TimeseriesCreateBatchParams{ - InstrumentID: &t.InstrumentID, - Name: t.Name, - ParameterID: t.ParameterID, - UnitID: t.UnitID, - Type: db.TimeseriesTypeConstant, - } - } - uu := make([]db.TimeseriesCreateBatchRow, len(createTimeseriesParams)) - createConstantsParams := make([]db.InstrumentConstantCreateBatchParams, len(createTimeseriesParams)) - qtx.TimeseriesCreateBatch(ctx, createTimeseriesParams).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { - if e != nil { - err = e - return - } - if r.InstrumentID == nil { - err = errors.New("instrument id must not be nil") - } - createConstantsParams[i] = db.InstrumentConstantCreateBatchParams{ - InstrumentID: *r.InstrumentID, - TimeseriesID: r.ID, - } - uu[i] = r - }) - if err != nil { - return nil, err - } - qtx.InstrumentConstantCreateBatch(ctx, createConstantsParams).Exec(batchExecErr(&err)) - if err != nil { - return nil, err - } - if err := tx.Commit(ctx); err != nil { - return nil, err - } - return uu, nil -} diff --git a/api/internal/service/instrument_incl.go b/api/internal/service/instrument_incl.go index 71ad5404..2a734bdc 100644 --- a/api/internal/service/instrument_incl.go +++ b/api/internal/service/instrument_incl.go @@ -2,7 +2,6 @@ package service import ( "context" - "errors" "slices" "time" @@ -80,30 +79,17 @@ func createInclOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument args := slices.Concat(createTimeseriesBatchParams...) 
inclArgs := slices.Concat(createInclSegmentBatchParams...) - createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) var err error - q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { + q.TimeseriesCreateBatch(ctx, args).QueryRow(func(_ int, _ db.TimeseriesCreateBatchRow, e error) { if e != nil { err = e return } - if r.InstrumentID == nil { - err = errors.New("new timeseries must have instrument id") - return - } - createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } }) if err != nil { return err } - q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } q.InclSegmentCreateBatch(ctx, inclArgs).Exec(batchExecErr(&err)) if err != nil { return err diff --git a/api/internal/service/instrument_ipi.go b/api/internal/service/instrument_ipi.go index cc0e8746..7ba50b16 100644 --- a/api/internal/service/instrument_ipi.go +++ b/api/internal/service/instrument_ipi.go @@ -101,7 +101,6 @@ func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) args := slices.Concat(createTimeseriesBatchParams...) ipiArgs := slices.Concat(createIpiSegmentBatchParams...) - createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) var err error q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { @@ -113,19 +112,11 @@ func createIpiOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) err = errors.New("new timeseries must have instrument id") return } - createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } ipiArgs[i].LengthTimeseriesID = &r.ID }) if err != nil { return err } - q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } q.IpiSegmentCreateBatch(ctx, ipiArgs).Exec(batchExecErr(&err)) if err != nil { return err diff --git a/api/internal/service/instrument_saa.go b/api/internal/service/instrument_saa.go index 9e5bbbac..738ccf01 100644 --- a/api/internal/service/instrument_saa.go +++ b/api/internal/service/instrument_saa.go @@ -101,7 +101,6 @@ func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) args := slices.Concat(createTimeseriesBatchParams...) saaArgs := slices.Concat(createSaaSegmentBatchParams...) 
- createInstrumentConstantBatchParams := make([]db.InstrumentConstantCreateBatchParams, len(args)) var err error q.TimeseriesCreateBatch(ctx, args).QueryRow(func(i int, r db.TimeseriesCreateBatchRow, e error) { @@ -113,19 +112,11 @@ func createSaaOptsBatch(ctx context.Context, q *db.Queries, ii []dto.Instrument) err = errors.New("new timeseries must have instrument id") return } - createInstrumentConstantBatchParams[i] = db.InstrumentConstantCreateBatchParams{ - TimeseriesID: r.ID, - InstrumentID: *r.InstrumentID, - } saaArgs[i].LengthTimeseriesID = &r.ID }) if err != nil { return err } - q.InstrumentConstantCreateBatch(ctx, createInstrumentConstantBatchParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } q.SaaSegmentCreateBatch(ctx, saaArgs).Exec(batchExecErr(&err)) if err != nil { return err diff --git a/api/internal/service/survey123.go b/api/internal/service/survey123.go new file mode 100644 index 00000000..41831baf --- /dev/null +++ b/api/internal/service/survey123.go @@ -0,0 +1,327 @@ +package service + +import ( + "context" + "database/sql" + "errors" + "fmt" + "log" + "math" + "strconv" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/USACE/instrumentation-api/api/internal/dto" + "github.com/google/uuid" +) + +func (s DBService) Survey123Create(ctx context.Context, sv dto.Survey123) (uuid.UUID, error) { + tx, err := s.db.Begin(ctx) + if err != nil { + return uuid.Nil, err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + newID, err := qtx.Survey123Create(ctx, db.Survey123CreateParams{ + ProjectID: sv.ProjectID, + Name: sv.Name, + CreatedBy: sv.CreatedBy, + }) + if err != nil { + return uuid.Nil, err + } + + tsIDs := make([]uuid.UUID, 0) + args := make([]db.Survey123EquivalencyTableRowCreateOrUpdateBatchParams, len(sv.Rows)) + for idx, r := range sv.Rows { + if r.TimeseriesID != nil { + tsIDs = append(tsIDs, *r.TimeseriesID) + } + args[idx] = db.Survey123EquivalencyTableRowCreateOrUpdateBatchParams{ + Survey123ID: newID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + InstrumentID: r.InstrumentID, + TimeseriesID: r.TimeseriesID, + } + } + + if len(tsIDs) != 0 { + isValid, err := qtx.TimeseriesGetIsStandard(ctx, tsIDs) + if err != nil { + return uuid.Nil, err + } + if !isValid { + return uuid.Nil, errors.New("one or more timeseries ids in the equivalency table are invalid") + } + } + + qtx.Survey123EquivalencyTableRowCreateOrUpdateBatch(ctx, args).Exec(batchExecErr(&err)) + if err != nil { + return uuid.Nil, err + } + + return newID, tx.Commit(ctx) +} + +func (s DBService) Survey123Update(ctx context.Context, sv dto.Survey123) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer txDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.Survey123Update(ctx, db.Survey123UpdateParams{ + ID: sv.ID, + Name: sv.Name, + UpdatedBy: sv.UpdatedBy, + UpdatedAt: sv.UpdatedAt, + }); err != nil { + return err + } + + if err := qtx.Survey123EquivalencyTableRowDeleteForSurvey123(ctx, sv.ID); err != nil { + return err + } + + tsIDs := make([]uuid.UUID, 0) + args := make([]db.Survey123EquivalencyTableRowCreateOrUpdateBatchParams, len(sv.Rows)) + for idx, r := range sv.Rows { + if r.TimeseriesID != nil { + tsIDs = append(tsIDs, *r.TimeseriesID) + } + args[idx] = db.Survey123EquivalencyTableRowCreateOrUpdateBatchParams{ + Survey123ID: sv.ID, + FieldName: r.FieldName, + DisplayName: &r.DisplayName, + InstrumentID: r.InstrumentID, + TimeseriesID: r.TimeseriesID, + } + } + + if len(tsIDs) != 0 
{
+		isValid, err := qtx.TimeseriesGetIsStandard(ctx, tsIDs)
+		if err != nil {
+			return err
+		}
+		if !isValid {
+			return errors.New("one or more timeseries ids in the equivalency table are invalid")
+		}
+	}
+
+	qtx.Survey123EquivalencyTableRowCreateOrUpdateBatch(ctx, args).Exec(batchExecErr(&err))
+	if err != nil {
+		return err
+	}
+
+	return tx.Commit(ctx)
+}
+
+func (s DBService) createOrUpdateSurvey123PayloadError(ctx context.Context, survey123ID uuid.UUID, errMsgs []string) error {
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return err
+	}
+	defer txDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	if err := qtx.Survey123PayloadErrorDeleteForSurvey123(ctx, survey123ID); err != nil {
+		if !errors.Is(err, sql.ErrNoRows) {
+			return err
+		}
+	}
+
+	args := make([]db.Survey123PayloadErrorCreateBatchParams, len(errMsgs))
+	for idx, errMsg := range errMsgs {
+		args[idx] = db.Survey123PayloadErrorCreateBatchParams{
+			Survey123ID:  survey123ID,
+			ErrorMessage: &errMsg,
+		}
+	}
+	qtx.Survey123PayloadErrorCreateBatch(ctx, args).Exec(batchExecErr(&err))
+	if err != nil {
+		return err
+	}
+
+	return tx.Commit(ctx)
+}
+
+func (s DBService) Survey123MeasurementCreateOrUpdateBatch(ctx context.Context, survey123ID uuid.UUID, sp dto.Survey123Payload, rr []db.Survey123EquivalencyTable) error {
+	eqt := make(map[string]dto.Survey123EquivalencyTableRow)
+	for _, r := range rr {
+		eqt[r.FieldName] = dto.Survey123EquivalencyTableRow{
+			TimeseriesID: r.TimeseriesID,
+			InstrumentID: r.InstrumentID,
+		}
+	}
+
+	em := make([]string, 0)
+	defer func() {
+		if err := s.createOrUpdateSurvey123PayloadError(ctx, survey123ID, em); err != nil {
+			log.Print(err.Error())
+		}
+	}()
+
+	tx, err := s.db.Begin(ctx)
+	if err != nil {
+		return err
+	}
+	defer txDo(ctx, tx.Rollback)
+	qtx := s.WithTx(tx)
+
+	arg := make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, 0)
+
+	switch sp.EventType {
+	case "addData":
+		for _, edit := range sp.Edits {
+			for _, r := range edit.Adds {
+				mm, err := parseAttributes(r.Attributes, eqt)
+				if err != nil {
+					// record per-field errors but still keep whatever
+					// measurements parsed successfully (bad values come
+					// back masked as NaN)
+					em = append(em, err.Error())
+				}
+				for _, m := range mm {
+					arg = append(arg, db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+						TimeseriesID: m.TimeseriesID,
+						Time:         m.Time,
+						Value:        m.Value,
+					})
+				}
+			}
+		}
+	case "editData":
+		for _, edit := range sp.Edits {
+			for _, r := range edit.Updates {
+				mm, err := parseAttributes(r.Attributes, eqt)
+				if err != nil {
+					em = append(em, err.Error())
+				}
+				for _, m := range mm {
+					arg = append(arg, db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+						TimeseriesID: m.TimeseriesID,
+						Time:         m.Time,
+						Value:        m.Value,
+					})
+				}
+			}
+			for _, r := range edit.Adds {
+				mm, err := parseAttributes(r.Attributes, eqt)
+				if err != nil {
+					em = append(em, err.Error())
+				}
+				for _, m := range mm {
+					arg = append(arg, db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+						TimeseriesID: m.TimeseriesID,
+						Time:         m.Time,
+						Value:        m.Value,
+					})
+				}
+			}
+		}
+	default:
+		return errors.New("invalid value for 'eventType'")
+	}
+
+	qtx.TimeseriesMeasurementCreateOrUpdateBatch(ctx, arg).Exec(batchExecErr(&err))
+	if err != nil {
+		return err
+	}
+
+	return tx.Commit(ctx)
+}
+func parseAttributes(attr map[string]interface{}, eqt map[string]dto.Survey123EquivalencyTableRow) ([]db.Measurement, error) {
+	errs := make([]error, 0)
+	mappings := make(map[string]map[string]interface{})
+
+	// group by instrument prefix; fields arrive as "<instrument>__<field>"
+	for k, v := range attr {
+		if k == "" {
+			errs = append(errs, errors.New("invalid key format, skipping fields with empty string key"))
+			continue
+		}
+		before, after, found := strings.Cut(k, "__")
+		if !found {
+			before, after = "", k
+		}
+		// accumulate into the per-instrument map instead of replacing it,
+		// so an instrument keeps all of its fields
+		if mappings[before] == nil {
+			mappings[before] = make(map[string]interface{})
+		}
+		mappings[before][after] = v
+	}
+
+	mm := make([]db.Measurement, 0)
+	for prefix, subMap := range mappings {
+		dtKey := "datetime"
+		commentKey := "comment"
+
+		dtTmp, exists := subMap[dtKey]
+		if !exists {
+			if prefix != "" {
+				dtKey = "__" + dtKey
+			}
+			errs = append(errs, fmt.Errorf("expected '%s%s' field to be present", prefix, dtKey))
+			continue
+		}
+		// JSON numbers unmarshal into interface{} as float64, so accept
+		// both numeric forms of the epoch-millisecond timestamp
+		var dt int64
+		switch dtVal := dtTmp.(type) {
+		case float64:
+			dt = int64(dtVal)
+		case int64:
+			dt = dtVal
+		default:
+			if prefix != "" {
+				dtKey = "__" + dtKey
+			}
+			errs = append(errs, fmt.Errorf("expected '%s%s' field to have Unix timestamp (epoch milliseconds) format", prefix, dtKey))
+			continue
+		}
+
+		t := time.UnixMilli(dt)
+
+		commentTmp, _ := subMap[commentKey].(string)
+		var comment *string
+		if commentTmp != "" {
+			comment = &commentTmp
+		}
+
+		delete(subMap, dtKey)
+		delete(subMap, commentKey)
+
+		for subKey, val := range subMap {
+			if prefix != "" {
+				subKey = "__" + subKey
+			}
+			// masked is scoped per field so one bad value does not mask
+			// every later field of the same instrument
+			var masked *bool
+			var v float64
+			switch _val := val.(type) {
+			case float64:
+				v = _val
+			case string:
+				var err error
+				v, err = strconv.ParseFloat(_val, 64)
+				if err != nil {
+					tmp := true
+					masked = &tmp
+					v = math.NaN()
+					errs = append(errs, fmt.Errorf("invalid value type for field '%s%s' (must be number), using NaN", prefix, subKey))
+				}
+			default:
+				tmp := true
+				masked = &tmp
+				v = math.NaN()
+				errs = append(errs, fmt.Errorf("invalid value type for field '%s%s' (must be number), using NaN", prefix, subKey))
+			}
+			r, exists := eqt[prefix+subKey]
+			if !exists || r.TimeseriesID == nil {
+				errs = append(errs, fmt.Errorf("row %s%s does not exist in equivalency table or timeseries id not assigned", prefix, subKey))
+				continue
+			}
+			mm = append(mm, db.Measurement{TimeseriesID: *r.TimeseriesID, Time: t, Value: v, Masked: masked, Annotation: comment})
+		}
+	}
+	return mm, errors.Join(errs...)
+}
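The parser above hinges on the `<instrument>__<field>` key convention in the Survey123 webhook attributes: everything before the first `__` selects an equivalency-table entry, the `datetime` field carries epoch milliseconds, `comment` becomes the note annotation, and every other field is a measurement. A minimal, self-contained sketch of that grouping step; the instrument names, field names, and values are hypothetical, not taken from a real webhook payload:

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

func main() {
	// Hypothetical Survey123 attributes: one submission reporting two
	// instruments, keyed with the "<instrument>__<field>" convention.
	attrs := map[string]interface{}{
		"PZ-101__datetime": float64(1700000000000), // epoch millis; JSON numbers decode as float64
		"PZ-101__reading":  "12.34",
		"PZ-102__datetime": float64(1700000000000),
		"PZ-102__reading":  56.78,
	}

	// group values per instrument prefix, as parseAttributes does
	groups := map[string]map[string]interface{}{}
	for k, v := range attrs {
		prefix, field, found := strings.Cut(k, "__")
		if !found {
			prefix, field = "", k
		}
		if groups[prefix] == nil {
			groups[prefix] = map[string]interface{}{}
		}
		groups[prefix][field] = v
	}

	for prefix, sub := range groups {
		ms := int64(sub["datetime"].(float64))
		fmt.Println(prefix, time.UnixMilli(ms).UTC(), sub["reading"])
	}
}
```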
diff --git a/api/internal/service/timeseries.go b/api/internal/service/timeseries.go
index c4825805..11a97cde 100644
--- a/api/internal/service/timeseries.go
+++ b/api/internal/service/timeseries.go
@@ -10,12 +10,15 @@ import (
 func (s DBService) TimeseriesCreateBatch(ctx context.Context, tt []dto.Timeseries) ([]db.TimeseriesCreateBatchRow, error) {
 	args := make([]db.TimeseriesCreateBatchParams, len(tt))
 	for idx, ts := range tt {
+		if ts.Type == "" {
+			ts.Type = string(db.TimeseriesTypeStandard)
+		}
 		args[idx] = db.TimeseriesCreateBatchParams{
 			InstrumentID: &ts.InstrumentID,
 			Name:         ts.Name,
 			ParameterID:  ts.ParameterID,
 			UnitID:       ts.UnitID,
-			Type:         db.TimeseriesTypeStandard,
+			Type:         db.TimeseriesType(ts.Type),
 		}
 	}
 
diff --git a/api/internal/service/uploader.go b/api/internal/service/uploader.go
index d64cdbf0..ea36ae2c 100644
--- a/api/internal/service/uploader.go
+++ b/api/internal/service/uploader.go
@@ -7,48 +7,57 @@ import (
 
 	"github.com/USACE/instrumentation-api/api/internal/db"
 	"github.com/USACE/instrumentation-api/api/internal/dto"
+	"github.com/USACE/instrumentation-api/api/internal/upload"
 	"github.com/google/uuid"
 )
 
 func (s DBService) UploaderConfigCreate(ctx context.Context, uc dto.UploaderConfig) (uuid.UUID, error) {
 	return s.Queries.UploaderConfigCreate(ctx, db.UploaderConfigCreateParams{
-		ProjectID:             uc.ProjectID,
-		Name:                  uc.Name,
-		Description:           uc.Description,
-		Type:                  db.UploaderConfigType(uc.Type),
-		TzName:                uc.TzName,
-		CreatedBy:             uc.CreatedBy,
-		CreatedAt:             uc.CreatedAt,
-		TimeField:             uc.TimeField,
-		ValidatedFieldEnabled: uc.ValidatedFieldEnabled,
-		ValidatedField:        uc.ValidatedField,
-		MaskedFieldEnabled:    uc.MaskedFieldEnabled,
-		MaskedField:           uc.MaskedField,
-		CommentFieldEnabled:   uc.CommentFieldEnabled,
-		CommentField:          uc.CommentField,
-		ColumnOffset:          uc.ColumnOffset,
-		RowOffset:             uc.RowOffset,
+		ProjectID:              uc.ProjectID,
+		Name:                   uc.Name,
+		Description:            uc.Description,
+		Type:                   db.UploaderConfigType(uc.Type),
+		TzName:                 uc.TzName,
+		CreatedBy:              uc.CreatedBy,
+		CreatedAt:              uc.CreatedAt,
+		TimeField:              uc.TimeField,
+		ValidatedFieldEnabled:  uc.ValidatedFieldEnabled,
+		ValidatedField:         uc.ValidatedField,
+		MaskedFieldEnabled:     uc.MaskedFieldEnabled,
+		MaskedField:            uc.MaskedField,
+		CommentFieldEnabled:    uc.CommentFieldEnabled,
+		CommentField:           uc.CommentField,
+		ColumnOffset:           uc.ColumnOffset,
+		RowOffset:              uc.RowOffset,
+		DepthBasedInstrumentID: uc.DepthBasedInstrumentID,
+		InstrumentFieldEnabled: uc.InstrumentFieldEnabled,
+		InstrumentField:        uc.InstrumentField,
+		XlsxSheetName:          uc.XlsxSheetName,
 	})
 }
 
 func (s DBService) UploaderConfigUpdate(ctx context.Context, uc dto.UploaderConfig) error {
 	return s.Queries.UploaderConfigUpdate(ctx, db.UploaderConfigUpdateParams{
-		ID:                    uc.ID,
-		Name:                  uc.Name,
-		Description:           uc.Description,
-		Type:                  db.UploaderConfigType(uc.Type),
-		TzName:                uc.TzName,
-		UpdatedBy:             uc.UpdatedBy,
-		UpdatedAt:             uc.UpdatedAt,
-		TimeField:             uc.TimeField,
-		ValidatedFieldEnabled: uc.ValidatedFieldEnabled,
-		ValidatedField:        uc.ValidatedField,
-		MaskedFieldEnabled:    uc.MaskedFieldEnabled,
-		MaskedField:           uc.MaskedField,
-		CommentFieldEnabled:   uc.CommentFieldEnabled,
-		CommentField:          uc.CommentField,
-		ColumnOffset:          uc.ColumnOffset,
-		RowOffset:             uc.RowOffset,
+		ID:                     uc.ID,
+		Name:                   uc.Name,
+		Description:            uc.Description,
+		Type:                   db.UploaderConfigType(uc.Type),
+		TzName:                 uc.TzName,
+		UpdatedBy:              uc.UpdatedBy,
+		UpdatedAt:              uc.UpdatedAt,
+		TimeField:              uc.TimeField,
+		ValidatedFieldEnabled:  uc.ValidatedFieldEnabled,
+		ValidatedField:         uc.ValidatedField,
+		MaskedFieldEnabled:     uc.MaskedFieldEnabled,
+ MaskedField: uc.MaskedField, + CommentFieldEnabled: uc.CommentFieldEnabled, + CommentField: uc.CommentField, + ColumnOffset: uc.ColumnOffset, + RowOffset: uc.RowOffset, + DepthBasedInstrumentID: uc.DepthBasedInstrumentID, + InstrumentFieldEnabled: uc.InstrumentFieldEnabled, + InstrumentField: uc.InstrumentField, + XlsxSheetName: uc.XlsxSheetName, }) } @@ -56,9 +65,10 @@ func (s DBService) UploaderConfigMappingCreateBatch(ctx context.Context, ucID uu args := make([]db.UploaderConfigMappingCreateBatchParams, len(mm)) for idx, m := range mm { args[idx] = db.UploaderConfigMappingCreateBatchParams{ - UploaderConfigID: ucID, - FieldName: m.FieldName, - TimeseriesID: m.TimeseriesID, + UploaderConfigID: ucID, + FieldName: m.FieldName, + TimeseriesID: m.TimeseriesID, + InstrumentFieldName: m.InstrumentFieldName, } } var err error @@ -105,8 +115,12 @@ func (s DBService) UploaderConfigUploadFile(ctx context.Context, projectID, ucID if err != nil { return err } + switch uc.Type { case db.UploaderConfigTypeCsv: + if uc.TimeField == "" { + return errors.New("required time field must be set") + } ucm, err := qtx.UploaderConfigMappingList(ctx, uc.ID) if err != nil { return err @@ -123,17 +137,18 @@ func (s DBService) UploaderConfigUploadFile(ctx context.Context, projectID, ucID return err } case db.UploaderConfigTypeDux: - if uc.DepthBasedInstrumentID == nil { - return errors.New("dux file uploads must have inclinometer instrument assignment") + if err := timeseriesMeasurementCreateBatchFromDUXFile(ctx, qtx, r, uc); err != nil { + return err + } + case db.UploaderConfigTypeXlsx: + if uc.TimeField == "" { + return errors.New("required time field must be set") } - segs, err := qtx.InclSegmentListForInstrument(ctx, *uc.DepthBasedInstrumentID) + ucm, err := qtx.UploaderConfigMappingList(ctx, uc.ID) if err != nil { return err } - if len(segs) == 0 { - return errors.New("no segments found for assigned inclinometer") - } - if err := timeseriesMeasurementCreateBatchFromDuxFile(ctx, qtx, r, uc, segs); err != nil { + if err := timeseriesMeasurementCreateBatchFromXLSXFile(ctx, qtx, r, uc, ucm); err != nil { return err } default: @@ -142,3 +157,108 @@ func (s DBService) UploaderConfigUploadFile(ctx context.Context, projectID, ucID return tx.Commit(ctx) } + +func timeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { + fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) + for _, mapping := range ucm { + if mapping.TimeseriesID == nil { + continue + } + fn := mapping.FieldName + if uc.InstrumentFieldEnabled && uc.InstrumentField != nil && mapping.InstrumentFieldName != nil { + fn += *mapping.InstrumentFieldName + } + fieldNameTimeseriesIDMap[fn] = *mapping.TimeseriesID + } + + up, err := upload.NewUpload(upload.UploadOpts{ + FieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, + PreferDayFirst: uc.PreferDayFirst, + Timezone: uc.TzName, + ColOffset: int(uc.ColumnOffset), + RowOffset: int(uc.RowOffset), + Queries: q, + }) + if err != nil { + return err + } + + if err := up.TimeseriesMeasurementNoteCreateOrUpdateBatchCSV(ctx, r, uc); err != nil { + return err + } + return nil +} + +func timeseriesMeasurementCreateBatchFromDUXFile(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig) error { + if uc.DepthBasedInstrumentID == nil { + return errors.New("dux file uploads must have inclinometer instrument assignment") + } + segs, err := q.InclSegmentListForInstrument(ctx, *uc.DepthBasedInstrumentID) + if 
err != nil {
+		return err
+	}
+	if len(segs) == 0 {
+		return errors.New("no segments found for assigned inclinometer")
+	}
+	up, err := upload.NewUpload(upload.UploadOpts{
+		Timezone:   uc.TzName,
+		TimeFormat: "2006/01/02 15:04:05",
+		Queries:    q,
+	})
+	if err != nil {
+		return err
+	}
+	return up.TimeseriesMeasurementNoteCreateOrUpdateBatchDUX(ctx, r, uc, segs)
+}
+
+// non-datalogger TOA5/.dat parser (uses the uploader config); TOA5 is a
+// sub-spec handled by the CSV parser
+func timeseriesMeasurementCreateBatchFromTOA5File(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error {
+	fieldNameTimeseriesIDMap := make(map[string]uuid.UUID)
+	for _, mapping := range ucm {
+		if mapping.TimeseriesID == nil {
+			continue
+		}
+		fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID
+	}
+
+	up, err := upload.NewUpload(upload.UploadOpts{
+		FieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap,
+		Timezone:                 uc.TzName,
+		ColOffset:                2,
+		RowOffset:                0,
+		Queries:                  q,
+	})
+	if err != nil {
+		return err
+	}
+
+	return up.TimeseriesMeasurementNoteCreateOrUpdateBatchTOA5(ctx, r, uc)
+}
+
+func timeseriesMeasurementCreateBatchFromXLSXFile(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error {
+	fieldNameTimeseriesIDMap := make(map[string]uuid.UUID)
+	for _, mapping := range ucm {
+		if mapping.TimeseriesID == nil {
+			continue
+		}
+		fn := mapping.FieldName
+		// when the instrument column is enabled, mappings are keyed by
+		// field name + instrument name so one column can serve many instruments
+		if uc.InstrumentFieldEnabled && uc.InstrumentField != nil && mapping.InstrumentFieldName != nil {
+			fn += *mapping.InstrumentFieldName
+		}
+		fieldNameTimeseriesIDMap[fn] = *mapping.TimeseriesID
+	}
+
+	up, err := upload.NewUpload(upload.UploadOpts{
+		FieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap,
+		PreferDayFirst:           uc.PreferDayFirst,
+		Timezone:                 uc.TzName,
+		ColOffset:                int(uc.ColumnOffset),
+		RowOffset:                int(uc.RowOffset),
+		Queries:                  q,
+	})
+	if err != nil {
+		return err
+	}
+
+	return up.TimeseriesMeasurementNoteCreateOrUpdateBatchXLSX(ctx, r, uc)
+}
diff --git a/api/internal/service/uploader_parser.go b/api/internal/service/uploader_parser.go
deleted file mode 100644
index 2a7ccd43..00000000
--- a/api/internal/service/uploader_parser.go
+++ /dev/null
@@ -1,337 +0,0 @@
-package service
-
-import (
-	"context"
-	"encoding/csv"
-	"errors"
-	"fmt"
-	"io"
-	"math"
-	"strconv"
-	"strings"
-	"time"
-
-	"github.com/USACE/instrumentation-api/api/internal/db"
-	"github.com/google/uuid"
-	"github.com/jackc/pgx/v5/pgtype"
-)
-
-type timeseriesMeasurementNoteCreateOrUpdateBatchParams struct {
-	fields                   []string
-	fieldNameTimeseriesIDMap map[string]uuid.UUID
-	maskedColIdx             int
-	validatedColIdx          int
-	commentColIdx            int
-	timezone                 string
-	timeFieldIdx             int
-	colOffset                int
-	rowOffset                int
-}
-
-// non-datalogger toa5/dat parser (use uploader config)
-func timeseriesMeasurementCreateBatchFromTOA5File(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error {
-	reader := csv.NewReader(r)
-	_, err := reader.Read()
-	if err != nil {
-		return err
-	}
-	fieldHeader, err := reader.Read()
-	if err != nil {
-		return err
-	}
-	_, err = reader.Read()
-	if err != nil {
-		return err
-	}
-	_, err = reader.Read()
-	if err != nil {
-		return err
-	}
-
-	// first two columns are timestamp and record number
-	// we only want to collect the measurement fields here
-	fields := make([]string, len(fieldHeader)-2)
-	for i := 2; i < len(fieldHeader); i++ {
-		fields[i] = fieldHeader[i]
-	}
-
-	fieldNameTimeseriesIDMap := 
make(map[string]uuid.UUID) - for _, mapping := range ucm { - if mapping.TimeseriesID == nil { - continue - } - fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID - } - - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, q, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ - fields: fields, - fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, - timezone: uc.TzName, - colOffset: 2, - }); err != nil { - return err - } - return nil -} - -// cusom csv parser -func timeseriesMeasurementCreateBatchFromCSVFile(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, ucm []db.UploaderConfigMapping) error { - reader := csv.NewReader(r) - fieldHeader, err := reader.Read() - if err != nil { - return err - } - - timeFieldIdx := -1 - - fields := make([]string, len(fieldHeader)) - for idx := range fieldHeader { - header := fieldHeader[idx] - switch { - case uc.TimeField == header: - timeFieldIdx = idx - default: - fields[idx] = header - } - } - - if timeFieldIdx == -1 { - return errors.New("time field specified in uploader config does not exist") - } - - fieldNameTimeseriesIDMap := make(map[string]uuid.UUID) - for _, mapping := range ucm { - if mapping.TimeseriesID == nil { - continue - } - fieldNameTimeseriesIDMap[mapping.FieldName] = *mapping.TimeseriesID - } - - if err := timeseriesMeasurementNoteCreateOrUpdateBatch(ctx, q, reader, timeseriesMeasurementNoteCreateOrUpdateBatchParams{ - fields: fields, - fieldNameTimeseriesIDMap: fieldNameTimeseriesIDMap, - timezone: uc.TzName, - }); err != nil { - return err - } - return nil -} - -func timeseriesMeasurementCreateBatchFromDuxFile(ctx context.Context, q *db.Queries, r io.Reader, uc db.VUploaderConfig, segs []db.VInclSegment) error { - reader := csv.NewReader(r) - _, err := reader.Read() - if err != nil { - return err - } - h2, err := reader.Read() - if err != nil { - return err - } - rows, err := reader.ReadAll() - if err != nil { - return err - } - if len(segs) < len(rows) { - return errors.New("more measurements in file than segments in inclinometer") - } - - if len(h2) < 2 { - return errors.New("invalid format for \"dux\" file") - } - t, err := time.Parse(h2[1], "2006/01/02 15:04:05") - if err != nil { - return err - } - - createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams, len(rows)*5) - - // inclinometer depth must be in ascending order - for idx, row := range rows { - if len(row) < 5 { - return fmt.Errorf("invalid number of cell (%d) in one or more rows for dux file: 5 cells needed", len(row)) - } - if segs[idx].DepthTimeseriesID == nil || - segs[idx].A0TimeseriesID == nil || - segs[idx].A180TimeseriesID == nil || - segs[idx].B0TimeseriesID == nil || - segs[idx].B180TimeseriesID == nil { - return errors.New("one or more segments do not have required timeseries assignments (depth, a0, a18, b0, b180)") - } - depth, err := strconv.ParseFloat(row[0], 64) - if err != nil { - return err - } - a0, err := strconv.ParseFloat(row[1], 64) - if err != nil { - return err - } - a180, err := strconv.ParseFloat(row[2], 64) - if err != nil { - return err - } - b0, err := strconv.ParseFloat(row[3], 64) - if err != nil { - return err - } - b180, err := strconv.ParseFloat(row[4], 64) - if err != nil { - return err - } - - createMmtParams[idx] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: *segs[idx].DepthTimeseriesID, - Timezone: uc.TzName, - LocalTime: pgtype.Timestamp{ - Time: t, - Valid: true, - }, - Value: depth, - } - createMmtParams[idx+1] = 
db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: *segs[idx].A0TimeseriesID, - Timezone: uc.TzName, - LocalTime: pgtype.Timestamp{ - Time: t, - Valid: true, - }, - Value: a0, - } - createMmtParams[idx+2] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: *segs[idx].A180TimeseriesID, - Timezone: uc.TzName, - LocalTime: pgtype.Timestamp{ - Time: t, - Valid: true, - }, - Value: a180, - } - createMmtParams[idx+3] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: *segs[idx].B0TimeseriesID, - Timezone: uc.TzName, - LocalTime: pgtype.Timestamp{ - Time: t, - Valid: true, - }, - Value: b0, - } - createMmtParams[idx+4] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: *segs[idx].B180TimeseriesID, - Timezone: uc.TzName, - LocalTime: pgtype.Timestamp{ - Time: t, - Valid: true, - }, - Value: b180, - } - } - - q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - return err -} - -func timeseriesMeasurementNoteCreateOrUpdateBatch(ctx context.Context, q *db.Queries, csvReader *csv.Reader, arg timeseriesMeasurementNoteCreateOrUpdateBatchParams) error { - chunkSize := 1_000 - createMmtParams := make([]db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams, chunkSize) - createNoteParams := make([]db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams, chunkSize) - var mmtIdx, noteIdx int - for { - record, err := csvReader.Read() - if err == io.EOF { - break - } - if err != nil { - return err - } - - // TODO: do we need to accept other time formats? For now RFC3339 only - t, err := time.Parse(record[arg.timeFieldIdx], time.RFC3339) - if err != nil { - return err - } - - // get notes content and apply to all timeseries in loop - // TODO: this is not space efficient and should change to a range-based approach - hasNotes := arg.maskedColIdx != -1 || arg.validatedColIdx != -1 || arg.commentColIdx != -1 - - for idx, cell := range record[arg.colOffset:] { - fieldName := arg.fields[idx] - tsID, ok := arg.fieldNameTimeseriesIDMap[fieldName] - if ok { - continue - } - v, err := strconv.ParseFloat(cell, 64) - if err != nil { - continue - } - createMmtParams[mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: tsID, - LocalTime: pgtype.Timestamp{Time: t, Valid: true}, - Timezone: arg.timezone, - Value: v, - } - mmtIdx++ - if mmtIdx == chunkSize { - var err error - q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams).Exec(batchExecErr(&err)) - if err != nil { - return err - } - mmtIdx = 0 - } - - // automask NaN values - hasNaN := math.IsNaN(v) || math.IsInf(v, 0) - if hasNotes || hasNaN { - var masked *bool - var validated *bool - var comment *string - if arg.maskedColIdx != -1 { - maskedVal := strings.ToLower(record[arg.maskedColIdx]) == "true" - masked = &maskedVal - } else if hasNaN { - masked = &hasNaN - } - if arg.validatedColIdx != -1 { - validatedVal := strings.ToLower(record[arg.validatedColIdx]) == "true" - validated = &validatedVal - } - if arg.commentColIdx != -1 { - commentVal := strings.ToLower(record[arg.validatedColIdx]) - comment = &commentVal - } - createNoteParams[noteIdx] = db.TimeseriesNoteCreateOrUpdateAtTimezoneBatchParams{ - TimeseriesID: tsID, - LocalTime: pgtype.Timestamp{Time: t, Valid: true}, - Timezone: arg.timezone, - Masked: masked, - Validated: validated, - Annotation: comment, - } - noteIdx++ - if noteIdx == chunkSize { - var err error - 
-			q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams).Exec(batchExecErr(&err))
-				if err != nil {
-					return err
-				}
-				noteIdx = 0
-			}
-		}
-	}
-	if mmtIdx != 0 {
-		var err error
-		q.TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch(ctx, createMmtParams[:mmtIdx]).Exec(batchExecErr(&err))
-		if err != nil {
-			return err
-		}
-	}
-	if noteIdx != 0 {
-		var err error
-		q.TimeseriesNoteCreateOrUpdateAtTimezoneBatch(ctx, createNoteParams[:noteIdx]).Exec(batchExecErr(&err))
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
diff --git a/api/internal/tz/tz.go b/api/internal/tz/tz.go
new file mode 100644
index 00000000..0ce0ebdc
--- /dev/null
+++ b/api/internal/tz/tz.go
@@ -0,0 +1,37 @@
+package tz
+
+import (
+	"fmt"
+	"os"
+	"strings"
+)
+
+type timezones struct {
+	zz []string
+}
+
+func ListTimezones(zoneDir string) ([]string, error) {
+	t := timezones{zz: make([]string, 0)}
+	err := t.walkTimezoneDirs(zoneDir, "")
+	return t.zz, err
+}
+
+func (t *timezones) walkTimezoneDirs(zoneDir, path string) error {
+	files, err := os.ReadDir(zoneDir + path)
+	if err != nil {
+		return fmt.Errorf("unable to load timezones from directory %s; %w", zoneDir+path, err)
+	}
+	for _, f := range files {
+		// IANA zone names start with an uppercase letter; skip lowercase
+		// entries such as "posix", "right", and leap-second data files
+		if f.Name() != strings.ToUpper(f.Name()[:1])+f.Name()[1:] {
+			continue
+		}
+		if f.IsDir() {
+			if err := t.walkTimezoneDirs(zoneDir, path+"/"+f.Name()); err != nil {
+				return err
+			}
+		} else {
+			t.zz = append(t.zz, (path + "/" + f.Name())[1:])
+		}
+	}
+	return nil
+}
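A sketch of how `tz.ListTimezones` might be wired up; the `ZONEINFO` environment variable and the `/usr/share/zoneinfo` fallback are assumptions about the runtime image rather than part of this package:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/USACE/instrumentation-api/api/internal/tz"
)

func main() {
	// prefer the container's configured zoneinfo directory, if any
	zoneDir := os.Getenv("ZONEINFO")
	if zoneDir == "" {
		zoneDir = "/usr/share/zoneinfo" // common host path; an assumption
	}
	zones, err := tz.ListTimezones(zoneDir)
	if err != nil {
		log.Fatal(err)
	}
	if len(zones) > 0 {
		fmt.Println(len(zones), "zones, e.g.", zones[0]) // e.g. "America/Chicago"
	}
}
```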
diff --git a/api/internal/upload/csv.go b/api/internal/upload/csv.go
new file mode 100644
index 00000000..3f925b66
--- /dev/null
+++ b/api/internal/upload/csv.go
@@ -0,0 +1,58 @@
+package upload
+
+import (
+	"context"
+	"encoding/csv"
+	"io"
+
+	"github.com/USACE/instrumentation-api/api/internal/db"
+	"golang.org/x/text/encoding"
+	"golang.org/x/text/encoding/unicode"
+	"golang.org/x/text/transform"
+)
+
+// BOMAwareCSVReader will detect a UTF BOM (Byte Order Mark) at the
+// start of the data and transform to UTF8 accordingly.
+// If there is no BOM, it will read the data without any transformation.
+func BOMAwareCSVReader(reader io.Reader) *csv.Reader {
+	transformer := unicode.BOMOverride(encoding.Nop.NewDecoder())
+	return csv.NewReader(transform.NewReader(reader, transformer))
+}
+
+func (up *Upload) TimeseriesMeasurementNoteCreateOrUpdateBatchCSV(ctx context.Context, r io.Reader, uc db.VUploaderConfig) error {
+	csvReader := BOMAwareCSVReader(r)
+	csvReader.LazyQuotes = true
+	h1, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	colCfg := NewColumnConfigFromHeaderOpts(HeaderOpts{
+		headerCols:      h1,
+		timeField:       uc.TimeField,
+		maskedField:     uc.MaskedField,
+		validatedField:  uc.ValidatedField,
+		commentField:    uc.CommentField,
+		instrumentField: uc.InstrumentField,
+	})
+	if err := up.WithColumnConfig(colCfg); err != nil {
+		return err
+	}
+
+	return up.createOrUpdateFromCsvRows(ctx, csvReader)
+}
+
+func (up *Upload) createOrUpdateFromCsvRows(ctx context.Context, csvReader *csv.Reader) error {
+	for {
+		record, err := csvReader.Read()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			return err
+		}
+		if err := up.UploadMeasurementNoteBatchRow(ctx, record); err != nil {
+			return err
+		}
+	}
+	return up.UploadMeasurementNoteBatchRemaining(ctx)
+}
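Spreadsheet tools frequently write a UTF-8 byte order mark at the start of exported CSVs; without the override it would end up glued to the first header name and break the time-field match. A small runnable demonstration, with made-up header names:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/USACE/instrumentation-api/api/internal/upload"
)

func main() {
	// "\uFEFF" encodes the UTF-8 BOM bytes (EF BB BF), as written by
	// e.g. Excel's "CSV UTF-8" export; the column names are hypothetical.
	r := upload.BOMAwareCSVReader(strings.NewReader("\uFEFFtime,PZ-1\n2023-01-01T00:00:00Z,1.5\n"))
	header, err := r.Read()
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", header[0]) // "time", not "\ufefftime"
}
```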
diff --git a/api/internal/upload/dux.go b/api/internal/upload/dux.go
new file mode 100644
index 00000000..88082a2b
--- /dev/null
+++ b/api/internal/upload/dux.go
@@ -0,0 +1,142 @@
+package upload
+
+import (
+	"context"
+	"encoding/csv"
+	"errors"
+	"fmt"
+	"io"
+	"strconv"
+	"time"
+
+	"github.com/USACE/instrumentation-api/api/internal/db"
+)
+
+func (up *Upload) TimeseriesMeasurementNoteCreateOrUpdateBatchDUX(ctx context.Context, r io.Reader, uc db.VUploaderConfig, segs []db.VInclSegment) error {
+	csvReader := csv.NewReader(r)
+	csvReader.FieldsPerRecord = -1
+	_, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	h2, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	if len(h2) < 2 {
+		return errors.New("invalid format for \"dux\" file")
+	}
+	surveyName := h2[0]
+	t, err := time.ParseInLocation(up.TimeFormat, h2[1], up.Location)
+	if err != nil {
+		return err
+	}
+
+	up.createMmtParams = make([]db.TimeseriesMeasurementCreateOrUpdateBatchParams, up.chunkSize) // chunkSize == 1_000
+
+	// inclinometer depth must be in ascending order
+	var segIdx int
+	for {
+		row, err := csvReader.Read()
+		if errors.Is(err, io.EOF) {
+			if segIdx <= len(segs)-1 {
+				return errors.New("not enough data in dux to populate segments in instrument")
+			}
+			break
+		}
+		if err != nil {
+			return err
+		}
+		// a repeated survey header starts the next survey in the file, so
+		// it must be checked before the width and overflow checks below
+		if row[0] == surveyName {
+			if segIdx <= len(segs)-1 {
+				return errors.New("not enough data in dux to populate segments in instrument")
+			}
+			var err error
+			t, err = time.ParseInLocation(up.TimeFormat, row[1], up.Location)
+			if err != nil {
+				return err
+			}
+			segIdx = 0
+			continue
+		}
+		if segIdx > len(segs)-1 {
+			return errors.New("more measurements in file than segments in inclinometer")
+		}
+		if len(row) < 5 {
+			return fmt.Errorf("invalid number of cells (%d) in one or more rows for dux file: 5 cells needed", len(row))
+		}
+		if segs[segIdx].DepthTimeseriesID == nil ||
+			segs[segIdx].A0TimeseriesID == nil ||
+			segs[segIdx].A180TimeseriesID == nil ||
+			segs[segIdx].B0TimeseriesID == nil ||
+			segs[segIdx].B180TimeseriesID == nil {
+			return errors.New("one or more segments do not have required timeseries assignments (depth, a0, a180, b0, b180)")
+		}
+		depth, err := strconv.ParseFloat(row[0], 64)
+		if err != nil {
+			return err
+		}
+		a0, err := strconv.ParseFloat(row[1], 64)
+		if err != nil {
+			return err
+		}
+		a180, err := strconv.ParseFloat(row[2], 64)
+		if err != nil {
+			return err
+		}
+		b0, err := strconv.ParseFloat(row[3], 64)
+		if err != nil {
+			return err
+		}
+		b180, err := strconv.ParseFloat(row[4], 64)
+		if err != nil {
+			return err
+		}
+
+		up.createMmtParams[up.mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+			TimeseriesID: *segs[segIdx].DepthTimeseriesID,
+			Time:         t,
+			Value:        depth,
+		}
+		up.createMmtParams[up.mmtIdx+1] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+			TimeseriesID: *segs[segIdx].A0TimeseriesID,
+			Time:         t,
+			Value:        a0,
+		}
+		up.createMmtParams[up.mmtIdx+2] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+			TimeseriesID: *segs[segIdx].A180TimeseriesID,
+			Time:         t,
+			Value:        a180,
+		}
+		up.createMmtParams[up.mmtIdx+3] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+			TimeseriesID: *segs[segIdx].B0TimeseriesID,
+			Time:         t,
+			Value:        b0,
+		}
+		up.createMmtParams[up.mmtIdx+4] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{
+			TimeseriesID: *segs[segIdx].B180TimeseriesID,
+			Time:         t,
+			Value:        b180,
+		}
+		up.mmtIdx += 5
+		segIdx++
+		if up.mmtIdx == up.chunkSize {
+			up.Queries.TimeseriesMeasurementCreateOrUpdateBatch(ctx, up.createMmtParams).Exec(func(_ int, e error) {
+				if e != nil {
+					err = e
+					return
+				}
+			})
+			if err != nil {
+				return err
+			}
+			up.mmtIdx = 0
+		}
+	}
+	if up.mmtIdx != 0 {
+		// flush only the filled portion of the buffer
+		up.Queries.TimeseriesMeasurementCreateOrUpdateBatch(ctx, up.createMmtParams[:up.mmtIdx]).Exec(func(_ int, e error) {
+			if e != nil {
+				err = e
+				return
+			}
+		})
+	}
+	return err
+}
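For reference, a sketch of the layout this parser expects: a discarded first line, a `<survey name>,<timestamp>` header, one five-column row per segment (depth, a0, a180, b0, b180), and the header repeating for each survey. The survey name, timestamps, and readings below are hypothetical; the loop mirrors the parser's `row[0] == surveyName` test:

```go
package main

import (
	"encoding/csv"
	"fmt"
	"strings"
	"time"
)

func main() {
	// Hypothetical DUX content: two surveys of the same two-segment probe.
	sample := "probe,meta\n" +
		"SI-001,2024/01/02 08:30:00\n" +
		"2.0,-0.123,0.125,0.050,-0.048\n" +
		"4.0,-0.110,0.112,0.041,-0.040\n" +
		"SI-001,2024/01/03 08:30:00\n" +
		"2.0,-0.121,0.124,0.049,-0.047\n" +
		"4.0,-0.109,0.111,0.040,-0.039\n"

	r := csv.NewReader(strings.NewReader(sample))
	r.FieldsPerRecord = -1 // header rows are narrower than data rows
	rows, err := r.ReadAll()
	if err != nil {
		panic(err)
	}
	survey := rows[1][0]
	for _, row := range rows[1:] {
		if row[0] == survey { // a survey header starts a new reading set
			t, err := time.Parse("2006/01/02 15:04:05", row[1])
			if err != nil {
				panic(err)
			}
			fmt.Println("survey at", t)
			continue
		}
		fmt.Println("segment depth", row[0], "readings", row[1:])
	}
}
```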
diff --git a/api/internal/upload/toa5.go b/api/internal/upload/toa5.go
new file mode 100644
index 00000000..f5dc6bcc
--- /dev/null
+++ b/api/internal/upload/toa5.go
@@ -0,0 +1,125 @@
+package upload
+
+import (
+	"context"
+	"encoding/csv"
+	"io"
+
+	"github.com/USACE/instrumentation-api/api/internal/db"
+	"github.com/USACE/instrumentation-api/api/internal/dto"
+	"github.com/google/uuid"
+)
+
+func (up *Upload) TimeseriesMeasurementNoteCreateOrUpdateBatchTOA5(ctx context.Context, r io.Reader, uc db.VUploaderConfig) error {
+	csvReader := csv.NewReader(r)
+	csvReader.FieldsPerRecord = -1
+	_, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	h2, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	_, err = csvReader.Read()
+	if err != nil {
+		return err
+	}
+	_, err = csvReader.Read()
+	if err != nil {
+		return err
+	}
+
+	// first two columns are timestamp and record number;
+	// collect only the measurement fields, shifted to index 0 so they
+	// align with the columns that follow the column offset
+	fields := make([]string, len(h2)-2)
+	for i := 2; i < len(h2); i++ {
+		fields[i-2] = h2[i]
+	}
+
+	up.ColumnConfig = ColumnConfig{
+		Fields:          fields,
+		TimeColIdx:      0,
+		MaskedColIdx:    -1,
+		ValidatedColIdx: -1,
+		CommentColIdx:   -1,
+		InstColIdx:      -1,
+	}
+
+	return up.createOrUpdateFromCsvRows(ctx, csvReader)
+}
+
+func (up *Upload) TimeseriesMeasurementCreteOrUpdateBatchDataloggerTOA5(ctx context.Context, r io.Reader) error {
+	csvReader := csv.NewReader(r)
+
+	envHeader, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	h1, err := csvReader.Read()
+	if err != nil {
+		return err
+	}
+	// skip units header
+	_, err = csvReader.Read()
+	if err != nil {
+		return err
+	}
+	// skip process header
+	_, err = csvReader.Read()
+	if err != nil {
+		return err
+	}
+
+	meta := dto.Environment{
+		Model:     envHeader[2],
+		SerialNo:  envHeader[3],
+		TableName: envHeader[6],
+	}
+
+	dl, err := up.Queries.DataloggerGetForModelSn(ctx, db.DataloggerGetForModelSnParams{
+		Model: &meta.Model,
+		Sn:    meta.SerialNo,
+	})
+	if err != nil {
+		return err
+	}
+	tableID, err := up.Queries.DataloggerTableGetOrCreate(ctx, db.DataloggerTableGetOrCreateParams{
+		DataloggerID: dl.ID,
+		TableName:    meta.TableName,
+	})
+	if err != nil {
+		return err
+	}
+
+	// first two columns are timestamp and record number;
+	// collect only the measurement fields, shifted to index 0
+	fields := make([]string, len(h1)-2)
+	for i := 2; i < len(h1); i++ {
+		fields[i-2] = h1[i]
+	}
+
+	eqt, err := up.Queries.EquivalencyTableGet(ctx, tableID)
+	if err != nil {
+		return err
+	}
+
+	fieldNameTimeseriesIDMap := make(map[string]uuid.UUID)
+	for _, eqtRow := range eqt.Fields {
+		if eqtRow.TimeseriesID == nil {
+			continue
+		}
+		fieldNameTimeseriesIDMap[eqtRow.FieldName] = *eqtRow.TimeseriesID
+	}
+
+	up.FieldNameTimeseriesIDMap = fieldNameTimeseriesIDMap
+	up.ColumnConfig = ColumnConfig{
+		Fields:          fields,
+		TimeColIdx:      0,
+		MaskedColIdx:    -1,
+		ValidatedColIdx: -1,
+		CommentColIdx:   -1,
+		InstColIdx:      -1,
+	}
+	return up.createOrUpdateFromCsvRows(ctx, csvReader)
+}
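Likewise, a sketch of the TOA5 shape these readers consume, with hypothetical station metadata: an environment line (model at index 2, serial number at index 3, table name at index 6, per the `dto.Environment` mapping above), a field-name line, units and processing lines, then data rows whose first two columns are the timestamp and record number:

```go
package main

import (
	"encoding/csv"
	"fmt"
	"strings"
)

func main() {
	// Hypothetical TOA5 export; only columns 2+ of the field-name line
	// are measurement fields.
	sample := `"TOA5","STN01","CR1000","12345","CR1000.Std","CPU:prog.CR1","HourlyTable"
"TIMESTAMP","RECORD","PZ_1","PZ_2"
"TS","RN","ft","ft"
"","","Avg","Avg"
"2024-01-02 08:00:00","1","12.3","45.6"
`
	r := csv.NewReader(strings.NewReader(sample))
	r.FieldsPerRecord = -1 // header lines have differing widths
	rows, err := r.ReadAll()
	if err != nil {
		panic(err)
	}
	fmt.Println("measurement fields:", rows[1][2:]) // [PZ_1 PZ_2]
	fmt.Println("first data row:", rows[4][2:])     // [12.3 45.6]
}
```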
+ +type uploadOptsInternal struct { + chunkSize int + createMmtParams []db.TimeseriesMeasurementCreateOrUpdateBatchParams + createNoteParams []db.TimeseriesNoteCreateOrUpdateBatchParams + mmtIdx int + noteIdx int + rowNumber int +} + +// the pointer receiver is required so buffer indices and the row counter persist between rows +func (up *Upload) UploadMeasurementNoteBatchRow(ctx context.Context, row []string) error { + // count every row so the first RowOffset rows are skipped exactly once + up.rowNumber++ + if up.rowNumber <= up.RowOffset { + return nil + } + + if up.ColOffset > len(row)-1 { + return errors.New("column offset (zero-based index) must be less than or equal to row length") + } + + var t time.Time + var err error + switch { + case up.TimeFormat != "" && up.Location != nil: + t, err = time.ParseInLocation(up.TimeFormat, row[up.TimeColIdx], up.Location) + if err != nil { + return err + } + case up.TimeFormat != "" && up.Location == nil: + t, err = time.Parse(up.TimeFormat, row[up.TimeColIdx]) + if err != nil { + return err + } + case up.TimeFormat == "" && up.Location != nil: + t, err = dateparse.ParseIn(row[up.TimeColIdx], up.Location, dateparse.PreferMonthFirst(!up.PreferDayFirst)) + if err != nil { + return err + } + case up.TimeFormat == "" && up.Location == nil: + t, err = dateparse.ParseAny(row[up.TimeColIdx], dateparse.PreferMonthFirst(!up.PreferDayFirst)) + if err != nil { + return err + } + default: + return errors.New("unable to set time with given configuration; this should never happen") + } + + var instName string + if up.InstColIdx != -1 { + instName = row[up.InstColIdx] + } + // get notes content and apply to all timeseries in loop + // TODO: this is not space efficient and should change to a range-based approach + hasNotes := up.MaskedColIdx != -1 || up.ValidatedColIdx != -1 || up.CommentColIdx != -1 + + for idx, cell := range row[up.ColOffset:] { + // guard against data rows wider than the header + if idx >= len(up.Fields) { + break + } + fieldName := up.Fields[idx] + instName + tsID, ok := up.FieldNameTimeseriesIDMap[fieldName] + if !ok { + // no timeseries is mapped to this column; skip it + continue + } + v, err := strconv.ParseFloat(cell, 64) + if err != nil { + continue + } + up.createMmtParams[up.mmtIdx] = db.TimeseriesMeasurementCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Value: v, + } + up.mmtIdx++ + if up.mmtIdx == up.chunkSize { + var err error + up.Queries.TimeseriesMeasurementCreateOrUpdateBatch(ctx, up.createMmtParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + up.mmtIdx = 0 + } + + // automask NaN and Inf values + hasNaN := math.IsNaN(v) || math.IsInf(v, 0) + if hasNotes || hasNaN { + var masked *bool + var validated *bool + var comment *string + if up.MaskedColIdx != -1 { + maskedVal := strings.ToLower(row[up.MaskedColIdx]) == "true" + masked = &maskedVal + } else if hasNaN { + masked = &hasNaN + } + if up.ValidatedColIdx != -1 { + validatedVal := strings.ToLower(row[up.ValidatedColIdx]) == "true" + validated = &validatedVal + } + if up.CommentColIdx != -1 { + commentVal := row[up.CommentColIdx] + comment = &commentVal + } + up.createNoteParams[up.noteIdx] = db.TimeseriesNoteCreateOrUpdateBatchParams{ + TimeseriesID: tsID, + Time: t, + Masked: masked, + Validated: validated, + Annotation: comment, + } + up.noteIdx++ + if up.noteIdx == up.chunkSize { + var err error + up.Queries.TimeseriesNoteCreateOrUpdateBatch(ctx, up.createNoteParams).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + up.noteIdx = 0 + } + } + } + return nil +}
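+ +// For illustration, a typical caller (such as the csv-based readers in this +// package) streams rows through UploadMeasurementNoteBatchRow, then flushes the +// partially filled buffers: +// +// for { +// row, err := csvReader.Read() +// if err == io.EOF { +// break +// } +// if err != nil { +// return err +// } +// if err := up.UploadMeasurementNoteBatchRow(ctx, row); err != nil { +// return err +// } +// } +// return up.UploadMeasurementNoteBatchRemaining(ctx)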
+ +func (up *Upload) UploadMeasurementNoteBatchRemaining(ctx context.Context) error { + if up.mmtIdx != 0 { + var err error + up.Queries.TimeseriesMeasurementCreateOrUpdateBatch(ctx, up.createMmtParams[:up.mmtIdx]).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + } + if up.noteIdx != 0 { + var err error + up.Queries.TimeseriesNoteCreateOrUpdateBatch(ctx, up.createNoteParams[:up.noteIdx]).Exec(func(_ int, e error) { + if e != nil { + err = e + return + } + }) + if err != nil { + return err + } + } + return nil +} diff --git a/api/internal/upload/upload_test.go b/api/internal/upload/upload_test.go new file mode 100644 index 00000000..91f48ea5 --- /dev/null +++ b/api/internal/upload/upload_test.go @@ -0,0 +1,120 @@ +package upload_test + +// import ( +// "bytes" +// "context" +// "testing" +// +// "github.com/USACE/instrumentation-api/api/internal/db" +// "github.com/USACE/instrumentation-api/api/internal/upload" +// "github.com/google/uuid" +// "github.com/stretchr/testify/assert" +// ) +// +// func TestCSVUpload(t *testing.T) { +// testTimeseriesID := uuid.New() +// fieldNameMap := map[string]uuid.UUID{ +// "testField": testTimeseriesID, +// } +// +// upload := upload.NewUpload(upload.UploadOpts{ +// FieldNameTimeseriesIDMap: fieldNameMap, +// Timezone: "UTC", +// Queries: mockQueries, +// ColOffset: 0, +// RowOffset: 0, +// ColumnConfig: upload.ColumnConfig{ +// Fields: []string{"testField"}, +// TimeColIdx: 0, +// InstColIdx: -1, +// }, +// }) +// +// r := bytes.NewReader([]byte("time,testField\n2023-01-01T00:00:00Z,10.5\n")) +// +// err := upload.TimeseriesMeasurementNoteCreateOrUpdateBatchCSV(context.Background(), r, db.VUploaderConfig{ +// TimeField: "time", +// }) +// +// assert.NoError(t, err) +// mockQueries.AssertExpectations(t) +// } +// +// func TestTOA5Upload(t *testing.T) { +// testTimeseriesID := uuid.New() +// fieldNameMap := map[string]uuid.UUID{ +// "testField": testTimeseriesID, +// } +// +// upload := upload.NewUpload(upload.UploadOpts{ +// FieldNameTimeseriesIDMap: fieldNameMap, +// Timezone: "UTC", +// Queries: mockQueries, +// }) +// +// r := bytes.NewReader([]byte(`"TOA5","Station","Table_Name" +// "TIMESTAMP","RECORD" +// "2023-01-01 00:00:00","1" +// "testValue1","testValue2" +// `)) +// +// err := upload.TimeseriesMeasurementNoteCreateOrUpdateBatchTOA5(context.Background(), r, db.VUploaderConfig{}) +// +// assert.NoError(t, err) +// mockQueries.AssertExpectations(t) +// } +// +// func TestXLSXUpload(t *testing.T) { +// testTimeseriesID := uuid.New() +// sheetName := "Sheet1" +// +// upload := upload.NewUpload(upload.UploadOpts{ +// FieldNameTimeseriesIDMap: map[string]uuid.UUID{"testField": testTimeseriesID}, +// Timezone: "UTC", +// Queries: mockQueries, +// }) +// +// // Note: This test uses a mock.
For a real XLSX test, you'd need an actual XLSX file +// err := upload.TimeseriesMeasurementNoteCreateOrUpdateBatchXLSX(context.Background(), bytes.NewReader([]byte{}), db.VUploaderConfig{ +// TimeField: "time", +// XlsxSheetName: &sheetName, +// }) +// +// assert.Error(t, err) // Expect error due to empty file +// mockQueries.AssertExpectations(t) +// } +// +// func TestDUXUpload(t *testing.T) { +// depthTSID := uuid.New() +// a0TSID := uuid.New() +// a180TSID := uuid.New() +// b0TSID := uuid.New() +// b180TSID := uuid.New() +// +// segments := []db.VInclSegment{ +// { +// DepthTimeseriesID: &depthTSID, +// A0TimeseriesID: &a0TSID, +// A180TimeseriesID: &a180TSID, +// B0TimeseriesID: &b0TSID, +// B180TimeseriesID: &b180TSID, +// }, +// } +// +// upload := upload.NewUpload(upload.UploadOpts{ +// Timezone: "UTC", +// Queries: mockQueries, +// }) +// +// r := bytes.NewReader([]byte(`header1,header2 +// 2024/01/01 12:00:00,timestamp +// 1.0,2.0,3.0,4.0,5.0 +// `)) +// +// err := upload.TimeseriesMeasurementNoteCreateOrUpdateBatchDUX(context.Background(), r, db.VUploaderConfig{ +// TzName: "UTC", +// }, segments) +// +// assert.NoError(t, err) +// mockQueries.AssertExpectations(t) +// } diff --git a/api/internal/upload/xlsx.go b/api/internal/upload/xlsx.go new file mode 100644 index 00000000..e9327cf0 --- /dev/null +++ b/api/internal/upload/xlsx.go @@ -0,0 +1,73 @@ +package upload + +import ( + "context" + "errors" + "io" + "log" + + "github.com/USACE/instrumentation-api/api/internal/db" + "github.com/xuri/excelize/v2" ) + +func (up *Upload) TimeseriesMeasurementNoteCreateOrUpdateBatchXLSX(ctx context.Context, r io.Reader, uc db.VUploaderConfig) error { + if uc.XlsxSheetName == nil { + return errors.New("must provide valid sheet name for xlsx") + } + f, err := excelize.OpenReader(r) + if err != nil { + return err + } + defer func() { + if err := f.Close(); err != nil { + log.Print(err.Error()) + } + }() + rows, err := f.Rows(*uc.XlsxSheetName) + if err != nil { + return err + } + defer func() { + if err := rows.Close(); err != nil { + log.Print(err) + } + }() + + // expect first row to be header with data + if next := rows.Next(); !next { + return errors.New("xlsx file is empty") + } + h1, err := rows.Columns() + if err != nil { + return err + } + + colCfg := NewColumnConfigFromHeaderOpts(HeaderOpts{ + headerCols: h1, + timeField: uc.TimeField, + maskedField: uc.MaskedField, + validatedField: uc.ValidatedField, + commentField: uc.CommentField, + instrumentField: uc.InstrumentField, + }) + if err := up.WithColumnConfig(colCfg); err != nil { + return err + } + + for rows.Next() { + row, err := rows.Columns() + if err != nil { + return err + } + // row offset skipping (RowOffset) is handled inside UploadMeasurementNoteBatchRow + if err := up.UploadMeasurementNoteBatchRow(ctx, row); err != nil { + return err + } + } + return up.UploadMeasurementNoteBatchRemaining(ctx) +} diff --git a/api/migrations/repeat/0040__views_instruments.sql b/api/migrations/repeat/0040__views_instruments.sql index 3832c27e..e325edb5 100644 --- a/api/migrations/repeat/0040__views_instruments.sql +++ b/api/migrations/repeat/0040__views_instruments.sql @@ -67,9 +67,10 @@ CREATE OR REPLACE VIEW v_instrument AS ( ) s ON s.instrument_id = i.id LEFT JOIN ( SELECT - ARRAY_AGG(timeseries_id) as constants, + ARRAY_AGG(id) as constants, instrument_id - FROM instrument_constants + FROM timeseries + WHERE type = 'constant' GROUP BY instrument_id ) c ON c.instrument_id = i.id LEFT JOIN ( diff --git 
a/api/migrations/repeat/0120__views_alert_check.sql b/api/migrations/repeat/0120__views_alert_check.sql index 4bd6adb4..e0787029 100644 --- a/api/migrations/repeat/0120__views_alert_check.sql +++ b/api/migrations/repeat/0120__views_alert_check.sql @@ -49,7 +49,7 @@ CREATE OR REPLACE VIEW v_alert_check_measurement_submittal AS ( MAX(time) FILTER (WHERE time > sub.created_at AND time <= sub.due_at) AS time FROM timeseries_measurement WHERE timeseries_id = ANY(SELECT id FROM timeseries WHERE instrument_id = inst.id) - AND NOT timeseries_id = ANY(SELECT timeseries_id FROM instrument_constants) + AND NOT timeseries_id = ANY(SELECT id FROM timeseries WHERE type='constant') GROUP BY timeseries_id ) lm ON true LEFT JOIN timeseries ts ON ts.id = lm.timeseries_id diff --git a/api/migrations/repeat/0140__views_depth_based_instruments.sql b/api/migrations/repeat/0140__views_depth_based_instruments.sql index fcd385a1..9119b31a 100644 --- a/api/migrations/repeat/0140__views_depth_based_instruments.sql +++ b/api/migrations/repeat/0140__views_depth_based_instruments.sql @@ -21,6 +21,7 @@ CREATE OR REPLACE VIEW v_saa_segment AS ( ORDER BY time DESC LIMIT 1 ) sub ON true + ORDER BY seg.id ASC ); @@ -114,6 +115,7 @@ CREATE OR REPLACE VIEW v_ipi_segment AS ( ORDER BY time DESC LIMIT 1 ) sub ON true + ORDER BY seg.id ASC ); @@ -176,6 +178,7 @@ CREATE OR REPLACE VIEW v_incl_segment AS ( seg.b0_timeseries_id, seg.b180_timeseries_id FROM incl_segment seg + ORDER BY seg.id ASC ); @@ -184,6 +187,8 @@ CREATE OR REPLACE VIEW v_incl_measurement AS ( r.instrument_id, r.time, COALESCE(jsonb_agg(jsonb_build_object( + 'segment_id', r.segment_id, + 'instrument_id', r.instrument_id, 'depth', r.depth, 'a0', r.a0, 'a180', r.a180, diff --git a/api/migrations/repeat/0170__views_uploader.sql b/api/migrations/repeat/0170__views_uploader.sql index e7a92ea3..6f4efecd 100644 --- a/api/migrations/repeat/0170__views_uploader.sql +++ b/api/migrations/repeat/0170__views_uploader.sql @@ -22,7 +22,11 @@ CREATE VIEW v_uploader_config AS ( u.comment_field, u.column_offset, u.row_offset, - u.depth_based_instrument_id + u.depth_based_instrument_id, + u.instrument_field_enabled, + u.instrument_field, + u.xlsx_sheet_name, + u.prefer_day_first FROM uploader_config u INNER JOIN profile pc ON u.created_by = pc.id LEFT JOIN profile pu ON u.updated_by = pu.id diff --git a/api/migrations/repeat/0180__views_survey123.sql b/api/migrations/repeat/0180__views_survey123.sql new file mode 100644 index 00000000..bee2a0c0 --- /dev/null +++ b/api/migrations/repeat/0180__views_survey123.sql @@ -0,0 +1,33 @@ +CREATE OR REPLACE VIEW v_survey123 AS ( + SELECT + sv.id, + sv.project_id, + sv.name, + sv.slug, + sv.created_at, + sv.updated_at, + sv.created_by, + p1.username AS created_by_username, + sv.updated_by, + p2.username AS updated_by_username, + COALESCE(f.fields, '[]'::json) AS fields, + COALESCE(er.errors, '{}') AS errors + FROM survey123 sv + LEFT JOIN profile p1 ON p1.id = sv.created_by + LEFT JOIN profile p2 ON p2.id = sv.updated_by + LEFT JOIN LATERAL ( + SELECT json_agg(json_build_object( + 'field_name', eq.field_name, + 'display_name', eq.display_name, + 'instrument_id', eq.instrument_id, + 'timeseries_id', eq.timeseries_id + ) ORDER BY eq.field_name) AS fields + FROM survey123_equivalency_table eq + WHERE eq.survey123_id = sv.id + ) f ON true + LEFT JOIN LATERAL ( + SELECT array_agg(ier.error_message) AS errors + FROM survey123_payload_error ier + WHERE ier.survey123_id = sv.id + ) er ON true +); diff --git 
a/api/migrations/schema/V1.25.00__uploader_instrument_col.sql b/api/migrations/schema/V1.25.00__uploader_instrument_col.sql new file mode 100644 index 00000000..61856cfc --- /dev/null +++ b/api/migrations/schema/V1.25.00__uploader_instrument_col.sql @@ -0,0 +1,16 @@ +ALTER TABLE uploader_config +ADD COLUMN instrument_field_enabled boolean NOT NULL DEFAULT false, +ADD COLUMN instrument_field text; + +ALTER TABLE uploader_config_mapping +ADD COLUMN instrument_field_name text, +DROP CONSTRAINT uploader_config_mapping_uploader_config_id_field_name; + +ALTER TABLE uploader_config_mapping +ADD CONSTRAINT uploader_config_mapping_uploader_config_id_instrument_field_name_field_name + UNIQUE(uploader_config_id, instrument_field_name, field_name); + +ALTER TYPE uploader_config_type ADD VALUE 'xlsx'; + +ALTER TABLE uploader_config +ADD COLUMN xlsx_sheet_name text; diff --git a/api/migrations/schema/V1.26.00__survey123.sql b/api/migrations/schema/V1.26.00__survey123.sql new file mode 100644 index 00000000..ae83ea29 --- /dev/null +++ b/api/migrations/schema/V1.26.00__survey123.sql @@ -0,0 +1,35 @@ +CREATE TABLE survey123 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + project_id uuid NOT NULL REFERENCES project(id), + name text UNIQUE NOT NULL, + slug text UNIQUE NOT NULL, + created_at timestamptz NOT NULL DEFAULT now(), + updated_at timestamptz, + created_by uuid NOT NULL REFERENCES profile(id), + updated_by uuid REFERENCES profile(id), + deleted boolean NOT NULL DEFAULT false, + CONSTRAINT survey123_id_deleted_key UNIQUE (id, deleted) +); + +CREATE TABLE survey123_equivalency_table ( + survey123_id uuid NOT NULL REFERENCES survey123(id), + survey123_deleted boolean NOT NULL DEFAULT false, + field_name text NOT NULL, + display_name text, + instrument_id uuid REFERENCES instrument(id) ON DELETE CASCADE, + timeseries_id uuid REFERENCES timeseries(id) ON DELETE CASCADE, + CONSTRAINT survey123_equivalency_table_survey123_id_survey123_deleted_field_name_key UNIQUE (survey123_id, survey123_deleted, field_name), + CONSTRAINT unique_active_survey123 FOREIGN KEY (survey123_id, survey123_deleted) + REFERENCES survey123(id, deleted) ON UPDATE CASCADE +); + +CREATE TABLE survey123_preview ( + survey123_id uuid NOT NULL REFERENCES survey123(id), + preview text NOT NULL, + updated_at timestamptz NOT NULL DEFAULT now(), + -- named unique constraint required by the Survey123PreviewCreateOrUpdate upsert (on conflict on constraint survey123_id_key) + CONSTRAINT survey123_id_key UNIQUE (survey123_id) +); + +CREATE TABLE survey123_payload_error ( + survey123_id uuid NOT NULL REFERENCES survey123(id), + error_message text +); diff --git a/api/migrations/schema/V1.27.00__day_month_order_uploader_config.sql b/api/migrations/schema/V1.27.00__day_month_order_uploader_config.sql new file mode 100644 index 00000000..5727d165 --- /dev/null +++ b/api/migrations/schema/V1.27.00__day_month_order_uploader_config.sql @@ -0,0 +1 @@ +ALTER TABLE uploader_config ADD column prefer_day_first boolean NOT NULL DEFAULT false; diff --git a/api/migrations/schema/V1.28.00__rm_instrument_constant.sql b/api/migrations/schema/V1.28.00__rm_instrument_constant.sql new file mode 100644 index 00000000..89f70ee4 --- /dev/null +++ b/api/migrations/schema/V1.28.00__rm_instrument_constant.sql @@ -0,0 +1,24 @@ +-- ensure that all timeseries constants have correct type +UPDATE timeseries SET "type"='constant' WHERE id = any(SELECT timeseries_id FROM instrument_constants); + +-- drop all views; to be recreated +DO $$ +DECLARE drop_views_query text; +BEGIN + SELECT 'DROP VIEW ' || string_agg (table_name, ', ') || ' CASCADE;' + FROM information_schema.views + INTO drop_views_query + WHERE table_schema = 'midas' + AND table_name LIKE 
'v_%'; + + IF (drop_views_query IS NULL) THEN + RAISE NOTICE 'not dropping views on schema midas -- no views found to drop'; + ELSE + EXECUTE drop_views_query; + END IF; +END +$$; + +-- instrument constants already have timeseries of type "constant", +-- and each timeseries has an "instrument_id", so the join table is redundant +DROP TABLE instrument_constants; diff --git a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql index 94986465..415a8995 100644 --- a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql +++ b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql @@ -1,5 +1,5 @@ -- https://www.timescale.com/blog/how-to-shape-sample-data-with-postgresql-generate_series-and-sql/ -create temp table if not exists seed_data_overrides ( +create table if not exists seed_data_overrides ( m_val int not null, p_inc float4 not null ); diff --git a/api/migrations/seed/V1.25.01__seed_uploader_config.sql b/api/migrations/seed/V1.25.01__seed_uploader_config.sql new file mode 100644 index 00000000..2f5e4067 --- /dev/null +++ b/api/migrations/seed/V1.25.01__seed_uploader_config.sql @@ -0,0 +1,127 @@ +INSERT INTO instrument (id, slug, name, geometry, station, station_offset, created_by, type_id) VALUES +('f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'inclinometer-001', 'Inclinometer 001', ST_SetSRID(ST_MakePoint(-75.1652, 39.9526), 4326), 100.0, 10.0, '57329df6-9f7a-4dad-9383-4633b452efab', '3c3dfc23-ed2a-4a4a-9ce0-683c7c1d4d20'); + +INSERT INTO project_instrument (project_id, instrument_id) VALUES +('5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c'); + +INSERT INTO timeseries (id, slug, name, instrument_id, parameter_id, unit_id, type) VALUES +('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', 'constant-bottom-elevation', 'Bottom Elevation Constant', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', 'constant'); + +INSERT INTO timeseries_measurement (timeseries_id, time, value) values ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now(), 100.5); + +INSERT INTO timeseries (id, slug, name, instrument_id, type) VALUES +('1e3a1d3c-38e3-4f34-b65f-d2b8287ed591', 'depth-segment-1', 'Depth Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('229d8f8d-fd60-465e-94c4-c25bc79d4f7e', 'a0-segment-1', 'A0 Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('74d19174-f911-4234-96e4-fae1a49969e6', 'a180-segment-1', 'A180 Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('fd7c720e-7119-45dc-bf7a-44da303a9aa4', 'b0-segment-1', 'B0 Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1', 'b180-segment-1', 'B180 Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('6d623d88-b6e8-4f0d-86be-d4445b6b6789', 'depth-segment-2', 'Depth Segment 2', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('40dfcce3-7f36-475e-969c-2b0b8633c856', 'a0-segment-2', 'A0 Segment 2', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('20792b7d-0f87-4f4b-81c6-616a8d76613a', 'a180-segment-2', 'A180 Segment 2', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('659d026e-3f47-4efe-899f-4129b5466228', 'b0-segment-2', 'B0 Segment 2', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('015a07f9-4005-4d2d-96dc-7f7d611ca51a', 'b180-segment-2', 'B180 Segment 2', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('600d164d-432c-40e8-a1f7-b4ebc112939e', 'depth-segment-3', 'Depth Segment 3', 
'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('b5e12c65-acdb-4439-a436-c762096e54d3', 'a0-segment-3', 'A0 Segment 3', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('34bfea2d-f312-4c13-bf4f-ac760236484c', 'a180-segment-3', 'A180 Segment 3', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('3f53c9ef-0058-49f3-b62c-b8cad5f92d4d', 'b0-segment-3', 'B0 Segment 3', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), +('14a4ae19-a857-44bb-a9e6-3df7e563847d', 'b180-segment-3', 'B180 Segment 3', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'); + +INSERT INTO incl_segment (instrument_id, id, depth_timeseries_id, a0_timeseries_id, a180_timeseries_id, b0_timeseries_id, b180_timeseries_id) VALUES +('f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 1, '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591', '229d8f8d-fd60-465e-94c4-c25bc79d4f7e', '74d19174-f911-4234-96e4-fae1a49969e6', 'fd7c720e-7119-45dc-bf7a-44da303a9aa4', 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'), +('f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 2, '6d623d88-b6e8-4f0d-86be-d4445b6b6789', '40dfcce3-7f36-475e-969c-2b0b8633c856', '20792b7d-0f87-4f4b-81c6-616a8d76613a', '659d026e-3f47-4efe-899f-4129b5466228', '015a07f9-4005-4d2d-96dc-7f7d611ca51a'), +('f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 3, '600d164d-432c-40e8-a1f7-b4ebc112939e', 'b5e12c65-acdb-4439-a436-c762096e54d3', '34bfea2d-f312-4c13-bf4f-ac760236484c', '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d', '14a4ae19-a857-44bb-a9e6-3df7e563847d'); + +INSERT INTO incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) VALUES +('f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 3, 'ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now()); + +INSERT INTO timeseries (id, slug, name, instrument_id, type) VALUES +('1a547f81-ff98-4d2c-8fb4-9151e28b8d7a', 'distance-to-water-1', 'Distance to Water', '9e8f2ca4-4037-45a4-aaca-d9e598877439', 'standard'), +('3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d', 'uploader-timeseries-3', 'Uploader Timeseries 3', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('4d5b281f-14b8-42d7-bb1e-9c6118da813f', 'uploader-timeseries-4', 'Uploader Timeseries 4', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('5e6c3920-25b9-43e9-a58e-28d6e49516b2', 'uploader-timeseries-5', 'Uploader Timeseries 5', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('6f7d4a21-36ca-44fb-99e0-59a7e60627c3', 'uploader-timeseries-6', 'Uploader Timeseries 6', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('7a8e5b22-47db-45fc-aa91-8a08f71738d4', 'uploader-timeseries-7', 'Uploader Timeseries 7', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('8b9f6c23-58ec-460d-ab02-3b19f82849e5', 'uploader-timeseries-8', 'Uploader Timeseries 8', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('9caf7d24-69fd-471e-bc13-6a2a09395af6', 'uploader-timeseries-9', 'Uploader Timeseries 9', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'), +('adb08e25-7b0e-482f-cd24-7b3b1a4a6b07', 'uploader-timeseries-10', 'Uploader Timeseries 10', 'a7540f69-c41e-43b3-b655-6e44097edb7e', 'standard'); + +-- need to commit txn to use updated uploader_config_type enum +COMMIT; + +INSERT INTO uploader_config (id, project_id, slug, name, description, created_by, type, tz_name, time_field, validated_field_enabled, validated_field, masked_field_enabled, masked_field, comment_field_enabled, comment_field, column_offset, row_offset, depth_based_instrument_id, instrument_field_enabled, instrument_field) VALUES +('e1f25d2b-807e-4d3a-b93c-8d42c0a38f87', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'xlsx-config-1', 'XLSX Config 
with Instrument', 'XLSX uploader with instrument field', '57329df6-9f7a-4dad-9383-4633b452efab', 'xlsx', 'UTC', 'time', true, 'valid', false, NULL, false, NULL, 0, 0, 'a7540f69-c41e-43b3-b655-6e44097edb7e', true, 'instrument_field'), +('f2e36e3c-918f-4e4b-cd14-9e53c1b49f98', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'xlsx-config-2', 'XLSX Config without Instrument', 'XLSX uploader without instrument field', '57329df6-9f7a-4dad-9383-4633b452efab', 'xlsx', 'UTC', 'time', false, NULL, false, NULL, false, NULL, 0, 0, NULL, false, NULL), +('10f47e4d-a29f-4f5c-de24-0f64d2c5a099', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'csv-config-1', 'CSV Config with Instrument', 'CSV uploader with instrument field', '57329df6-9f7a-4dad-9383-4633b452efab', 'csv', 'UTC', 'time', true, 'valid', false, NULL, false, NULL, 0, 0, 'a7540f69-c41e-43b3-b655-6e44097edb7e', true, 'instrument_field'), +('21f58e5e-b3af-505d-ef34-1f75e3d6b1aa', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'csv-config-2', 'CSV Config without Instrument', 'CSV uploader without instrument field', '57329df6-9f7a-4dad-9383-4633b452efab', 'csv', 'UTC', 'time', false, NULL, false, NULL, false, NULL, 0, 0, NULL, false, NULL), +('32f69e6f-c4bf-616e-f044-2f86f4e7c2bb', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'toa5-config-1', 'TOA5 Config', 'TOA5 uploader configuration', '57329df6-9f7a-4dad-9383-4633b452efab', 'toa5', 'UTC', 'time', false, NULL, false, NULL, false, NULL, 0, 0, NULL, false, NULL), +('43f7af70-d5cf-727f-1044-3f97f5f8d3cc', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'dux-config-1', 'DUX Config', 'DUX uploader configuration', '57329df6-9f7a-4dad-9383-4633b452efab', 'dux', 'UTC', 'time', false, NULL, false, NULL, false, NULL, 0, 0, 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', false, NULL); + +INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_id, instrument_field_name) VALUES +('e1f25d2b-807e-4d3a-b93c-8d42c0a38f87', 'DTW', '7ee902a3-56d0-4acf-8956-67ac82c03a96', 'Demo Piezometer'), +('e1f25d2b-807e-4d3a-b93c-8d42c0a38f87', 'DTW', '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a', 'Demo Staffgage'), +('f2e36e3c-918f-4e4b-cd14-9e53c1b49f98', 'UploaderTimeseries3', '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d', NULL), +('f2e36e3c-918f-4e4b-cd14-9e53c1b49f98', 'UploaderTimeseries4', '4d5b281f-14b8-42d7-bb1e-9c6118da813f', NULL), +('10f47e4d-a29f-4f5c-de24-0f64d2c5a099', 'DTW', '7ee902a3-56d0-4acf-8956-67ac82c03a96', 'Demo Piezometer'), +('10f47e4d-a29f-4f5c-de24-0f64d2c5a099', 'DTW', '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a', 'Demo Staffgage'), +('21f58e5e-b3af-505d-ef34-1f75e3d6b1aa', 'UploaderTimeseries7', '7a8e5b22-47db-45fc-aa91-8a08f71738d4', NULL), +('21f58e5e-b3af-505d-ef34-1f75e3d6b1aa', 'UploaderTimeseries8', '8b9f6c23-58ec-460d-ab02-3b19f82849e5', NULL), +('32f69e6f-c4bf-616e-f044-2f86f4e7c2bb', 'UploaderTimeseries3', '9caf7d24-69fd-471e-bc13-6a2a09395af6', NULL), +('32f69e6f-c4bf-616e-f044-2f86f4e7c2bb', 'UploaderTimeseries4', 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07', NULL); + + +insert into timeseries_measurement (timeseries_id, time, value) +select + timeseries_id, + m.time, + m.value +from +unnest(array[ + '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591'::uuid, + '229d8f8d-fd60-465e-94c4-c25bc79d4f7e'::uuid, + '74d19174-f911-4234-96e4-fae1a49969e6'::uuid, + 'fd7c720e-7119-45dc-bf7a-44da303a9aa4'::uuid, + 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'::uuid, + '6d623d88-b6e8-4f0d-86be-d4445b6b6789'::uuid, + '40dfcce3-7f36-475e-969c-2b0b8633c856'::uuid, + '20792b7d-0f87-4f4b-81c6-616a8d76613a'::uuid, + '659d026e-3f47-4efe-899f-4129b5466228'::uuid, + 
'015a07f9-4005-4d2d-96dc-7f7d611ca51a'::uuid, + '600d164d-432c-40e8-a1f7-b4ebc112939e'::uuid, + 'b5e12c65-acdb-4439-a436-c762096e54d3'::uuid, + '34bfea2d-f312-4c13-bf4f-ac760236484c'::uuid, + '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d'::uuid, + '14a4ae19-a857-44bb-a9e6-3df7e563847d'::uuid, + '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a'::uuid, + '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d'::uuid, + '4d5b281f-14b8-42d7-bb1e-9c6118da813f'::uuid, + '5e6c3920-25b9-43e9-a58e-28d6e49516b2'::uuid, + '6f7d4a21-36ca-44fb-99e0-59a7e60627c3'::uuid, + '7a8e5b22-47db-45fc-aa91-8a08f71738d4'::uuid, + '8b9f6c23-58ec-460d-ab02-3b19f82849e5'::uuid, + '9caf7d24-69fd-471e-bc13-6a2a09395af6'::uuid, + 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07'::uuid +]) as timeseries_id, +( + with daily_series as ( + select ts, date(ts) as day, rownum + from generate_series(now() - interval '1 year', now(), '1 day'::interval) with ordinality as t(ts, rownum) + ), + daily_value as ( + select ts, day, date_part('month', ts) as m_val, rownum, random() as val + from daily_series + order by day + ), + daily_wave as ( + select + day, + 1 + .2 * cos(rownum * 6.28/180) as p_mod + from daily_series + ) + select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value + from daily_value dv + inner join daily_wave dw on dv.day=dw.day + inner join seed_data_overrides o on dv.m_val=o.m_val + order by ts +) m +on conflict do nothing; diff --git a/api/migrations/seed/V1.26.01__seed_survey123.sql b/api/migrations/seed/V1.26.01__seed_survey123.sql new file mode 100644 index 00000000..86309774 --- /dev/null +++ b/api/migrations/seed/V1.26.01__seed_survey123.sql @@ -0,0 +1,23 @@ +INSERT INTO timeseries (id, instrument_id, slug, name, type) VALUES +('359bd5df-d43e-491a-871d-4701dcbff136', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'test-survey123-mapping-1', 'test-survey123-mapping-1', 'standard'), +('c3c00251-12fb-42a1-9d49-cdb269bb3039', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'test-survey123-mapping-2', 'test-survey123-mapping-2', 'standard'), +('939ff61d-65df-4f7f-b145-210d00f16d90', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'test-survey123-mapping-3', 'test-survey123-mapping-3', 'standard'), +('e45a9620-a431-4b70-af97-a4e185eb7311', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'test-survey123-mapping-4', 'test-survey123-mapping-4', 'standard'); + +INSERT INTO survey123 (id, project_id, name, slug, created_at, created_by) VALUES +('f5e1f7d2-7b1d-4b1e-8e93-d50e55b0a6b6', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'Survey 1', 'survey-1', now(), '57329df6-9f7a-4dad-9383-4633b452efab'), +('a2e19d85-4c64-4e99-b93a-4f4f56a718cf', '5b6f4f37-7755-4cf9-bd02-94f1e9bc5984', 'Survey 2', 'survey-2', now(), '57329df6-9f7a-4dad-9383-4633b452efab'); + +INSERT INTO survey123_equivalency_table (survey123_id, survey123_deleted, field_name, display_name, instrument_id, timeseries_id) VALUES +('f5e1f7d2-7b1d-4b1e-8e93-d50e55b0a6b6', false, 'field1', 'Field 1', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', '359bd5df-d43e-491a-871d-4701dcbff136'), +('f5e1f7d2-7b1d-4b1e-8e93-d50e55b0a6b6', false, 'field2', 'Field 2', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'c3c00251-12fb-42a1-9d49-cdb269bb3039'), +('a2e19d85-4c64-4e99-b93a-4f4f56a718cf', false, 'field3', 'Field 3', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', '939ff61d-65df-4f7f-b145-210d00f16d90'), +('a2e19d85-4c64-4e99-b93a-4f4f56a718cf', false, 'field4', 'Field 4', 'd8c66ef9-06f0-4d52-9233-f3778e0624f0', 'e45a9620-a431-4b70-af97-a4e185eb7311'); + +INSERT INTO survey123_preview (survey123_id, preview, updated_at) VALUES 
+('f5e1f7d2-7b1d-4b1e-8e93-d50e55b0a6b6', '{"content": "Preview content for Survey 1"}', now()), +('a2e19d85-4c64-4e99-b93a-4f4f56a718cf', '{"content": "Preview content for Survey 2"}', now()); + +INSERT INTO survey123_payload_error (survey123_id, error_message) VALUES +('f5e1f7d2-7b1d-4b1e-8e93-d50e55b0a6b6', 'Error message for Survey 1'), +('a2e19d85-4c64-4e99-b93a-4f4f56a718cf', 'Error message for Survey 2'); diff --git a/api/queries/domain.sql b/api/queries/domain.sql index 3a3f781e..6d0249d2 100644 --- a/api/queries/domain.sql +++ b/api/queries/domain.sql @@ -1,7 +1,3 @@ --- name: PgTimezoneNamesList :many -select name, abbrev, utc_offset::text, is_dst from pg_catalog.pg_timezone_names; - - -- name: DomainList :many select * from v_domain; diff --git a/api/queries/equivalency_table.sql b/api/queries/equivalency_table.sql index e1a8d0cd..f782b9b5 100644 --- a/api/queries/equivalency_table.sql +++ b/api/queries/equivalency_table.sql @@ -2,16 +2,6 @@ select not exists (select * from datalogger_table where id = $1 and table_name = 'preparse'); --- name: EquivalencyTableTimeseriesGetIsValid :one -select not exists ( - select id from v_timeseries_computed - where id = $1 - union all - select timeseries_id from instrument_constants - where timeseries_id = $1 -); - - -- name: EquivalencyTableGet :one select datalogger_id, @@ -22,7 +12,7 @@ from v_datalogger_equivalency_table where datalogger_table_id = $1; --- name: EquivalencyTableCreateOrUpdate :exec +-- name: EquivalencyTableCreateOrUpdateBatch :batchexec insert into datalogger_equivalency_table (datalogger_id, datalogger_table_id, field_name, display_name, instrument_id, timeseries_id) values ($1, $2, $3, $4, $5, $6) @@ -30,7 +20,7 @@ on conflict on constraint datalogger_equivalency_table_datalogger_table_id_field do update set display_name = excluded.display_name, instrument_id = excluded.instrument_id, timeseries_id = excluded.timeseries_id; --- name: EquivalencyTableUpdate :exec +-- name: EquivalencyTableUpdateBatch :batchexec update datalogger_equivalency_table set field_name = $2, display_name = $3, diff --git a/api/queries/instrument.sql b/api/queries/instrument.sql index 783e4046..454a3a42 100644 --- a/api/queries/instrument.sql +++ b/api/queries/instrument.sql @@ -33,8 +33,8 @@ values ( sqlc.arg(station_offset), sqlc.arg(created_by), sqlc.arg(created_at), - sqlc.arg(nid_id), - sqlc.arg(usgs_id), + sqlc.narg(nid_id), + sqlc.narg(usgs_id), sqlc.arg(show_cwms_tab) ) returning id, slug; @@ -50,8 +50,8 @@ values ( sqlc.arg(station_offset), sqlc.arg(created_by), sqlc.arg(created_at), - sqlc.arg(nid_id), - sqlc.arg(usgs_id), + sqlc.narg(nid_id), + sqlc.narg(usgs_id), sqlc.arg(show_cwms_tab) ) returning id, slug; @@ -79,8 +79,8 @@ update instrument set updated_at=sqlc.arg(updated_at), station=sqlc.arg(station), station_offset=sqlc.arg(station_offset), - nid_id=sqlc.arg(nid_id), - usgs_id=sqlc.arg(usgs_id), + nid_id=sqlc.narg(nid_id), + usgs_id=sqlc.narg(usgs_id), show_cwms_tab=sqlc.arg(show_cwms_tab) where id = sqlc.arg(id) and id = any( diff --git a/api/queries/instrument_constant.sql b/api/queries/instrument_constant.sql deleted file mode 100644 index 58e3b112..00000000 --- a/api/queries/instrument_constant.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: InstrumentConstantList :many -select t.* from v_timeseries t -inner join instrument_constants ic on ic.timeseries_id = t.id -where ic.instrument_id = $1; - - --- name: InstrumentConstantCreate :exec -insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); - - --- 
name: InstrumentConstantCreateBatch :batchexec -insert into instrument_constants (instrument_id, timeseries_id) values ($1, $2); - - --- name: InstrumentConstantDelete :exec -delete from instrument_constants where instrument_id = $1 and timeseries_id = $2; diff --git a/api/queries/measurement.sql b/api/queries/measurement.sql index d84070bc..994fa1ad 100644 --- a/api/queries/measurement.sql +++ b/api/queries/measurement.sql @@ -33,12 +33,6 @@ insert into timeseries_measurement (timeseries_id, time, value) values ($1, $2, on conflict on constraint timeseries_unique_time do update set value = excluded.value; --- name: TimeseriesMeasurementCreateOrUpdateAtTimezoneBatch :batchexec -insert into timeseries_measurement (timeseries_id, time, value) -values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(value)) -on conflict on constraint timeseries_unique_time do update set value = excluded.value; - - -- name: TimeseriesNoteCreate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do nothing; @@ -49,12 +43,6 @@ insert into timeseries_notes (timeseries_id, time, masked, validated, annotation on conflict on constraint notes_unique_time do nothing; --- name: TimeseriesNoteCreateOrUpdateAtTimezoneBatch :batchexec -insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) -values (sqlc.arg(timeseries_id), ((sqlc.arg(local_time)::timestamp at time zone sqlc.arg(timezone)::text) at time zone 'UTC')::timestamptz, sqlc.arg(masked), sqlc.arg(validated), sqlc.arg(annotation)) -on conflict on constraint notes_unique_time do nothing; - - -- name: TimeseriesNoteCreateOrUpdate :exec insert into timeseries_notes (timeseries_id, time, masked, validated, annotation) values ($1, $2, $3, $4, $5) on conflict on constraint notes_unique_time do update set masked = excluded.masked, validated = excluded.validated, annotation = excluded.annotation; diff --git a/api/queries/survey123.sql b/api/queries/survey123.sql new file mode 100644 index 00000000..1303c3d0 --- /dev/null +++ b/api/queries/survey123.sql @@ -0,0 +1,49 @@ +-- name: Survey123ListForProject :many +select * from v_survey123 where project_id = $1; + + +-- name: Survey123Create :one +insert into survey123 (project_id, name, slug, created_by) values ($1, $2, slugify($2, 'survey123'), $3) returning id; + + +-- name: Survey123Update :exec +update survey123 set name=$2, updated_by=$3, updated_at=$4 where id=$1; + + +-- name: Survey123PreviewCreateOrUpdate :exec +insert into survey123_preview (survey123_id, preview, updated_at) values ($1,$2,$3) +on conflict on constraint survey123_id_key do update set preview=excluded.preview, updated_at=excluded.updated_at; + + +-- name: Survey123EquivalencyTableRowCreateOrUpdateBatch :batchexec +insert into survey123_equivalency_table (survey123_id, field_name, display_name, instrument_id, timeseries_id) values ($1, $2, $3, $4, $5) +on conflict on constraint survey123_equivalency_table_survey123_id_survey123_deleted_field_name_key do update set +display_name=excluded.display_name, instrument_id=excluded.instrument_id, timeseries_id=excluded.timeseries_id; + + +-- name: Survey123SoftDelete :exec +update survey123 set deleted = true where id = $1; + + +-- name: Survey123EquivalencyTableRowList :many +select * from survey123_equivalency_table where survey123_id = $1; + + +-- name: 
Survey123PayloadErrorDeleteForSurvey123 :exec +delete from survey123_payload_error where survey123_id = $1; + + +-- name: Survey123PayloadErrorCreateBatch :batchexec +insert into survey123_payload_error (survey123_id, error_message) values ($1, $2); + + +-- name: Survey123EquivalencyTableRowDeleteForSurvey123 :exec +delete from survey123_equivalency_table where survey123_id = $1; + + +-- name: Survey123PreviewGet :one +select p.survey123_id, p.preview, p.updated_at +from survey123_preview p +inner join survey123 s on p.survey123_id = s.id +where p.survey123_id = $1 +and not s.deleted; diff --git a/api/queries/timeseries.sql b/api/queries/timeseries.sql index c03e5f74..8bcc92cb 100644 --- a/api/queries/timeseries.sql +++ b/api/queries/timeseries.sql @@ -19,11 +19,25 @@ select not exists ( ); +-- name: TimeseriesGetIsStandard :one +select exists ( + select true from timeseries + where type = 'standard' + and id = any(sqlc.arg(timeseries_ids)::uuid[]) +); + + -- name: TimeseriesListForInstrument :many select * from v_timeseries where instrument_id = $1; +-- name: TimeseriesListForTypeInstrument :many +select * from v_timeseries +where instrument_id = $1 +and type = $2; + + -- name: TimeseriesListForPlotConfig :many select t.* from v_timeseries t inner join plot_configuration_timeseries_trace pct on pct.timeseries_id = t.id diff --git a/api/queries/uploader.sql b/api/queries/uploader.sql index ffaaef72..61ca5d9d 100644 --- a/api/queries/uploader.sql +++ b/api/queries/uploader.sql @@ -25,12 +25,16 @@ insert into uploader_config ( comment_field, column_offset, row_offset, - depth_based_instrument_id + depth_based_instrument_id, + instrument_field_enabled, + instrument_field, + xlsx_sheet_name, + prefer_day_first ) values ( sqlc.arg(project_id), sqlc.arg(name), - slugify(sqlc.arg(slug), 'uploader_config'), + slugify(sqlc.arg(name), 'uploader_config'), sqlc.arg(description), sqlc.arg(created_at), sqlc.arg(created_by), @@ -45,7 +49,11 @@ values ( sqlc.arg(comment_field), sqlc.arg(column_offset), sqlc.arg(row_offset), - sqlc.narg(depth_based_instrument_id) + sqlc.narg(depth_based_instrument_id), + sqlc.arg(instrument_field_enabled), + sqlc.narg(instrument_field), + sqlc.narg(xlsx_sheet_name), + sqlc.arg(prefer_day_first) ) returning id; @@ -67,7 +75,11 @@ update uploader_config set comment_field=$14, column_offset=$15, row_offset=$16, - depth_based_instrument_id=$17 + depth_based_instrument_id=$17, + instrument_field_enabled=$18, + instrument_field=$19, + xlsx_sheet_name=$20, + prefer_day_first=$21 where id=$1; @@ -80,7 +92,8 @@ select * from uploader_config_mapping where uploader_config_id=$1; -- name: UploaderConfigMappingCreateBatch :batchexec -insert into uploader_config_mapping (uploader_config_id, field_name, timeseries_id) values ($1, $2, $3); +insert into uploader_config_mapping (uploader_config_id, field_name, instrument_field_name, timeseries_id) values +(sqlc.arg(uploader_config_id), sqlc.arg(field_name), sqlc.narg(instrument_field_name), sqlc.arg(timeseries_id)); -- name: UploaderConfigMappingDeleteForUploaderConfig :exec diff --git a/compose.sh b/compose.sh index 2e3e02f6..e27d3265 100755 --- a/compose.sh +++ b/compose.sh @@ -103,7 +103,8 @@ elif [ "$1" = "clean" ]; then $COMPOSECMD -f docker-compose.dev.yml --profile=mock down -v elif [ "$1" = "test" ]; then - $COMPOSECMD up --remove-orphans -d --build db migrate elasticmq api + $COMPOSECMD up --remove-orphans -d --build db elasticmq api + $COMPOSECMD up --build migrate shift TEARDOWN=false diff --git a/go.work.sum b/go.work.sum index 
38e3ed0a..1399adaf 100644 --- a/go.work.sum +++ b/go.work.sum @@ -67,6 +67,7 @@ github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/testcontainers/testcontainers-go v0.29.1/go.mod h1:SnKnKQav8UcgtKqjp/AD8bE1MqZm+3TDb/B8crE3XnI= github.com/testcontainers/testcontainers-go/modules/postgres v0.29.1/go.mod h1:YsWyy+pHDgvGdi0axGOx6CGXWsE6eqSaApyd1FYYSSc= @@ -102,6 +103,7 @@ golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= google.golang.org/genproto/googleapis/rpc v0.0.0-20240311173647-c811ad7063a7/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= diff --git a/report/generated.d.ts b/report/generated.d.ts index 02a13de3..ccec5205 100644 --- a/report/generated.d.ts +++ b/report/generated.d.ts @@ -904,7 +904,7 @@ export interface paths { /** @description OK */ 200: { content: { - "application/json": components["schemas"]["db.PgTimezoneNamesListRow"][]; + "application/json": string[]; }; }; /** @description Bad Request */ @@ -4657,139 +4657,6 @@ export interface paths { }; }; }; - "/projects/{project_id}/instruments/{instrument_id}/constants": { - /** lists constants for a given instrument */ - get: { - parameters: { - path: { - /** @description project uuid */ - project_id: string; - /** @description instrument uuid */ - instrument_id: string; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": components["schemas"]["db.VTimeseries"][]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - /** creates instrument constants (i.e. 
timeseries) */ - post: { - parameters: { - query?: { - /** @description api key */ - key?: string; - }; - path: { - /** @description project uuid */ - project_id: string; - /** @description instrument uuid */ - instrument_id: string; - }; - }; - /** @description timeseries collection items payload */ - requestBody: { - content: { - "*/*": components["schemas"]["dto.TimeseriesCollectionItems"]; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": components["schemas"]["db.TimeseriesCreateBatchRow"][]; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; - "/projects/{project_id}/instruments/{instrument_id}/constants/{timeseries_id}": { - /** removes a timeseries as an instrument constant */ - delete: { - parameters: { - query?: { - /** @description api key */ - key?: string; - }; - path: { - /** @description project uuid */ - project_id: string; - /** @description instrument uuid */ - instrument_id: string; - /** @description timeseries uuid */ - timeseries_id: string; - }; - }; - responses: { - /** @description OK */ - 200: { - content: { - "application/json": { - [key: string]: unknown; - }; - }; - }; - /** @description Bad Request */ - 400: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Not Found */ - 404: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - /** @description Internal Server Error */ - 500: { - content: { - "application/json": components["schemas"]["echo.HTTPError"]; - }; - }; - }; - }; - }; "/projects/{project_id}/instruments/{instrument_id}/evaluations": { /** lists evaluations for a single instrument */ get: { @@ -6380,6 +6247,211 @@ export interface paths { }; }; }; + "/projects/{project_id}/survey123": { + /** lists Survey123 connections for a project */ + get: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["dto.Survey123"][]; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** creates a Survey123 connection with equivalency table mappings */ + post: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + }; + }; + /** @description survey123 payload */ + requestBody: { + content: { + "application/json": components["schemas"]["dto.Survey123"]; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: string; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + 
/** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; + "/projects/{project_id}/survey123/{survey123_id}": { + /** updates a Survey123 connection with equivalency table mappings */ + put: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description survey123 uuid */ + survey123_id: string; + }; + }; + /** @description survey123 payload */ + requestBody: { + content: { + "application/json": components["schemas"]["dto.Survey123"]; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: string; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + /** deletes a Survey123 connection with equivalency table mappings */ + delete: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description survey123 uuid */ + survey123_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": { + [key: string]: string; + }; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; + "/projects/{project_id}/survey123/{survey123_id}/previews": { + /** gets the most recent Survey123 raw json payload sent from the webhook API */ + get: { + parameters: { + path: { + /** @description project uuid */ + project_id: string; + /** @description survey123 uuid */ + survey123_id: string; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": string; + }; + }; + /** @description Bad Request */ + 400: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Not Found */ + 404: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + /** @description Internal Server Error */ + 500: { + content: { + "application/json": components["schemas"]["echo.HTTPError"]; + }; + }; + }; + }; + }; "/projects/{project_id}/timeseries": { /** lists all timeseries for a single project */ get: { @@ -7717,20 +7789,6 @@ export interface components { "db.MeasurementLean": { [key: string]: number; }; - /** - * @example { - * "utc_offset": "utc_offset", - * "name": "name", - * "abbrev": "abbrev", - * "is_dst": true - * } - */ - "db.PgTimezoneNamesListRow": { - abbrev?: string; - is_dst?: boolean; - name?: string; - utc_offset?: string; - }; /** * @example { * "x": "{}", @@ -7949,17 +8007,19 @@ export interface components { /** * @example { * "timeseries_id": "timeseries_id", + * "instrument_field_name": "instrument_field_name", * "uploader_config_id": "uploader_config_id", * "field_name": "field_name" * } */ "db.UploaderConfigMapping": { field_name?: string; + instrument_field_name?: string; timeseries_id?: string; 
uploader_config_id?: string; }; /** @enum {string} */ - "db.UploaderConfigType": "csv" | "dux" | "toa5"; + "db.UploaderConfigType": "csv" | "dux" | "toa5" | "xlsx"; /** * @example { * "instruments": [ @@ -8930,29 +8990,33 @@ export interface components { }; /** * @example { - * "validated_field": "validated_field", * "created_at": "created_at", * "description": "description", - * "row_offset": 6, * "comment_field": "comment_field", * "type": "csv", - * "created_by": "created_by", * "updated_by_username": "updated_by_username", - * "time_field": "time_field", + * "xlsx_sheet_name": "xlsx_sheet_name", * "masked_field": "masked_field", * "tz_name": "tz_name", * "updated_at": "updated_at", * "comment_field_enabled": true, + * "instrument_field_enabled": true, * "project_id": "project_id", * "column_offset": 0, - * "name": "name", - * "updated_by": "updated_by", - * "masked_field_enabled": true, * "created_by_username": "created_by_username", * "id": "id", - * "validated_field_enabled": true, * "depth_based_instrument_id": "depth_based_instrument_id", - * "slug": "slug" + * "slug": "slug", + * "instrument_field": "instrument_field", + * "validated_field": "validated_field", + * "prefer_day_first": true, + * "row_offset": 6, + * "created_by": "created_by", + * "time_field": "time_field", + * "name": "name", + * "updated_by": "updated_by", + * "masked_field_enabled": true, + * "validated_field_enabled": true * } */ "db.VUploaderConfig": { @@ -8965,9 +9029,12 @@ export interface components { depth_based_instrument_id?: string; description?: string; id?: string; + instrument_field?: string; + instrument_field_enabled?: boolean; masked_field?: string; masked_field_enabled?: boolean; name?: string; + prefer_day_first?: boolean; project_id?: string; row_offset?: number; slug?: string; @@ -8979,6 +9046,7 @@ export interface components { updated_by_username?: string; validated_field?: string; validated_field_enabled?: boolean; + xlsx_sheet_name?: string; }; "dto.AlertConfig": { alert_email_subscriptions?: components["schemas"]["dto.EmailAutocompleteResult"][]; @@ -9605,6 +9673,66 @@ export interface components { y_timeseries_id?: string; z_timeseries_id?: string; }; + /** + * @example { + * "updated_by_username": "updated_by_username", + * "project_id": "project_id", + * "name": "name", + * "updated_by": "updated_by", + * "created_at": "created_at", + * "updatedd_at": "updatedd_at", + * "created_by_username": "created_by_username", + * "id": "id", + * "rows": [ + * { + * "timeseries_id": "timeseries_id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" + * }, + * { + * "timeseries_id": "timeseries_id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" + * } + * ], + * "created_by": "created_by", + * "errors": [ + * "errors", + * "errors" + * ], + * "slug": "slug" + * } + */ + "dto.Survey123": { + created_at?: string; + created_by?: string; + created_by_username?: string; + errors?: string[]; + id?: string; + name?: string; + project_id?: string; + rows?: components["schemas"]["dto.Survey123EquivalencyTableRow"][]; + slug?: string; + updated_by?: string; + updated_by_username?: string; + updatedd_at?: string; + }; + /** + * @example { + * "timeseries_id": "timeseries_id", + * "display_name": "display_name", + * "instrument_id": "instrument_id", + * "field_name": "field_name" + * } + */ + "dto.Survey123EquivalencyTableRow": { + display_name?: string; + field_name?: string; + instrument_id?: 
string; + timeseries_id?: string; + }; "dto.TextOption": { enabled?: boolean; value?: string; @@ -9736,8 +9864,11 @@ export interface components { created_at?: string; created_by?: string; created_by_username?: string; + depth_based_instrument_id?: string; description?: string; id?: string; + instrument_field?: string; + instrument_field_enabled?: boolean; masked_field?: string; masked_field_enabled?: boolean; name?: string; @@ -9752,19 +9883,22 @@ export interface components { updatedd_at?: string; validated_field?: string; validated_field_enabled?: boolean; + xlsx_sheet_name?: string; }; /** * @example { * "timeseries_id": "timeseries_id", + * "instrument_field_name": "instrument_field_name", * "field_name": "field_name" * } */ "dto.UploaderConfigMapping": { field_name?: string; + instrument_field_name?: string; timeseries_id?: string; }; /** @enum {string} */ - "dto.UploaderConfigType": "csv" | "dux" | "toa5"; + "dto.UploaderConfigType": "csv" | "dux" | "toa5" | "xlsx"; /** * @example { * "x": [ diff --git a/sqlc.yml b/sqlc.yml index 41618cfb..8dbd2efd 100644 --- a/sqlc.yml +++ b/sqlc.yml @@ -144,6 +144,12 @@ sql: type: SaaMeasurement slice: true + # v_survey123 + - column: v_survey123.fields + go_type: + type: Survey123EquivalencyTableField + slice: true + # v_report_config - column: v_report_config.plot_configs go_type: